mirror of
https://github.com/n08i40k/schedule-parser-rusted.git
synced 2025-12-06 17:57:47 +03:00
Compare commits
76 Commits
283858fea3
...
dependabot
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6e53bffca2 | ||
|
2442641479
|
|||
|
ac16c96e5e
|
|||
|
622464e4c3
|
|||
|
39c60ef939
|
|||
|
d1ef5c032e
|
|||
|
b635750e28
|
|||
|
a59fff927d
|
|||
|
cdc89b5bcd
|
|||
|
ad86f6cd64
|
|||
|
a3b4a501db
|
|||
|
df0e99a4d0
|
|||
|
a8cf8fb0f5
|
|||
|
7ed866138e
|
|||
|
7bac48f8fc
|
|||
|
191ec36fef
|
|||
|
f121a04f1b
|
|||
|
df74ab03a1
|
|||
|
1b79d1cf1e
|
|||
|
2b9b1ea66b
|
|||
|
ca713d8d51
|
|||
|
69df538467
|
|||
|
aa019f8fcf
|
|||
|
b664ba578d
|
|||
|
983967f8b0
|
|||
|
e5760120e2
|
|||
|
a28fb66dd4
|
|||
|
3780fb3136
|
|||
|
6c71bc19f5
|
|||
|
2d0041dc8b
|
|||
|
b5d372e109
|
|||
|
84dca02c34
|
|||
|
6c9d3b3b31
|
|||
|
a348b1b99b
|
|||
|
ff12ee5da2
|
|||
|
35f707901f
|
|||
|
edea6c5424
|
|||
|
fdbb872fc3
|
|||
|
dbc800fef1
|
|||
|
e729d84c93
|
|||
|
cc7adf10ed
|
|||
|
57c1699c9a
|
|||
|
298c4f4dd3
|
|||
|
e3904a255b
|
|||
|
829c1cf68d
|
|||
|
6a535f8d73
|
|||
|
5e5cd53f46
|
|||
|
8d59e37976
|
|||
|
5e39fc9acc
|
|||
|
7c973bfda0
|
|||
|
8fba0fc709
|
|||
|
983ff4fa5e
|
|||
|
fb6f3fc05f
|
|||
|
e64011ba16
|
|||
|
6a106a366c
|
|||
|
4fca22662c
|
|||
|
d23092a32a
|
|||
|
01bfa38969
|
|||
|
851ec9225f
|
|||
|
8de1891724
|
|||
|
4cf6df379e
|
|||
|
ba8b164b6a
|
|||
|
ff9d7d6c3a
|
|||
|
9090716f87
|
|||
|
ee992f1b55
|
|||
|
7f71fb1616
|
|||
|
234055eaeb
|
|||
|
fceffb900d
|
|||
|
49ce0005dc
|
|||
|
4c738085f2
|
|||
|
20602eb863
|
|||
|
e04d462223
|
|||
|
22af02464d
|
|||
|
9a517519db
|
|||
|
65376e75f7
|
|||
|
bef6163c1b
|
26
.env.test
Normal file
26
.env.test
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# Schedule
|
||||||
|
# SCHEDULE_INIT_URL=
|
||||||
|
SCHEDULE_DISABLE_AUTO_UPDATE=1
|
||||||
|
|
||||||
|
# Basic authorization
|
||||||
|
JWT_SECRET="test-secret-at-least-256-bits-used"
|
||||||
|
|
||||||
|
# VKID
|
||||||
|
VK_ID_CLIENT_ID=0
|
||||||
|
VK_ID_REDIRECT_URI="vk0://vk.com/blank.html"
|
||||||
|
|
||||||
|
# Telegram Mini-App
|
||||||
|
TELEGRAM_BOT_ID=0
|
||||||
|
TELEGRAM_MINI_APP_HOST=example.com
|
||||||
|
TELEGRAM_TEST_DC=false
|
||||||
|
|
||||||
|
# Yandex Cloud
|
||||||
|
YANDEX_CLOUD_API_KEY=""
|
||||||
|
YANDEX_CLOUD_FUNC_ID=""
|
||||||
|
|
||||||
|
# Firebase
|
||||||
|
# GOOGLE_APPLICATION_CREDENTIALS=
|
||||||
|
|
||||||
|
# LOGGING
|
||||||
|
RUST_BACKTRACE=1
|
||||||
|
# RUST_LOG=debug
|
||||||
145
.github/workflows/build.yml
vendored
Normal file
145
.github/workflows/build.yml
vendored
Normal file
@@ -0,0 +1,145 @@
|
|||||||
|
name: build
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "master" ]
|
||||||
|
tags-ignore: [ "release/v*" ]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
env:
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
|
||||||
|
BINARY_NAME: schedule-parser-rusted
|
||||||
|
|
||||||
|
TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
|
||||||
|
|
||||||
|
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||||
|
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||||
|
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||||
|
|
||||||
|
DOCKER_IMAGE_NAME: ${{ github.repository }}
|
||||||
|
|
||||||
|
DOCKER_REGISTRY_HOST: registry.n08i40k.ru
|
||||||
|
DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
|
||||||
|
DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
name: Test
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
run: |
|
||||||
|
cargo test
|
||||||
|
env:
|
||||||
|
DATABASE_URL: ${{ env.TEST_DB }}
|
||||||
|
SCHEDULE_DISABLE_AUTO_UPDATE: 1
|
||||||
|
JWT_SECRET: "test-secret-at-least-256-bits-used"
|
||||||
|
VK_ID_CLIENT_ID: 0
|
||||||
|
VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
|
||||||
|
TELEGRAM_BOT_ID: 0
|
||||||
|
TELEGRAM_MINI_APP_HOST: example.com
|
||||||
|
TELEGRAM_TEST_DC: false
|
||||||
|
YANDEX_CLOUD_API_KEY: ""
|
||||||
|
YANDEX_CLOUD_FUNC_ID: ""
|
||||||
|
build:
|
||||||
|
name: Build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: cargo build --release
|
||||||
|
|
||||||
|
- name: Extract debug symbols
|
||||||
|
run: |
|
||||||
|
objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
|
||||||
|
objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
|
||||||
|
objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}
|
||||||
|
|
||||||
|
- name: Setup sentry-cli
|
||||||
|
uses: matbour/setup-sentry-cli@v2.0.0
|
||||||
|
with:
|
||||||
|
version: latest
|
||||||
|
token: ${{ env.SENTRY_AUTH_TOKEN }}
|
||||||
|
organization: ${{ env.SENTRY_ORG }}
|
||||||
|
project: ${{ env.SENTRY_PROJECT }}
|
||||||
|
|
||||||
|
- name: Upload debug symbols to Sentry
|
||||||
|
run: |
|
||||||
|
sentry-cli debug-files upload --include-sources .
|
||||||
|
|
||||||
|
- name: Upload build binary artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-binary
|
||||||
|
path: target/release/${{ env.BINARY_NAME }}
|
||||||
|
|
||||||
|
- name: Upload build debug symbols artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-symbols
|
||||||
|
path: target/release/${{ env.BINARY_NAME }}.d
|
||||||
|
|
||||||
|
docker:
|
||||||
|
name: Build & Push Docker Image
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download build artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-binary
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3.10.0
|
||||||
|
|
||||||
|
- name: Login to Registry
|
||||||
|
uses: docker/login-action@v3.4.0
|
||||||
|
with:
|
||||||
|
registry: ${{ env.DOCKER_REGISTRY_HOST }}
|
||||||
|
username: ${{ env.DOCKER_REGISTRY_USERNAME }}
|
||||||
|
password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Extract Docker metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5.7.0
|
||||||
|
with:
|
||||||
|
images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
id: build-and-push
|
||||||
|
uses: docker/build-push-action@v6.15.0
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
|
build-args: |
|
||||||
|
"BINARY_NAME=${{ env.BINARY_NAME }}"
|
||||||
|
|
||||||
|
- name: Deploy
|
||||||
|
run: curl ${{ secrets.DEPLOY_URL }}
|
||||||
173
.github/workflows/release.yml
vendored
Normal file
173
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
name: release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags: [ "release/v*" ]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
env:
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
|
||||||
|
BINARY_NAME: schedule-parser-rusted
|
||||||
|
|
||||||
|
TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
|
||||||
|
|
||||||
|
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||||
|
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||||
|
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||||
|
|
||||||
|
DOCKER_IMAGE_NAME: ${{ github.repository }}
|
||||||
|
|
||||||
|
DOCKER_REGISTRY_HOST: registry.n08i40k.ru
|
||||||
|
DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
|
||||||
|
DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
name: Test
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
run: |
|
||||||
|
cargo test --verbose
|
||||||
|
env:
|
||||||
|
DATABASE_URL: ${{ env.TEST_DB }}
|
||||||
|
SCHEDULE_DISABLE_AUTO_UPDATE: 1
|
||||||
|
JWT_SECRET: "test-secret-at-least-256-bits-used"
|
||||||
|
VK_ID_CLIENT_ID: 0
|
||||||
|
VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
|
||||||
|
TELEGRAM_BOT_ID: 0
|
||||||
|
TELEGRAM_MINI_APP_HOST: example.com
|
||||||
|
TELEGRAM_TEST_DC: false
|
||||||
|
YANDEX_CLOUD_API_KEY: ""
|
||||||
|
YANDEX_CLOUD_FUNC_ID: ""
|
||||||
|
build:
|
||||||
|
name: Build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Rust
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: cargo build --release --verbose
|
||||||
|
|
||||||
|
- name: Extract debug symbols
|
||||||
|
run: |
|
||||||
|
objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
|
||||||
|
objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
|
||||||
|
objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}
|
||||||
|
|
||||||
|
- name: Setup sentry-cli
|
||||||
|
uses: matbour/setup-sentry-cli@v2.0.0
|
||||||
|
with:
|
||||||
|
version: latest
|
||||||
|
token: ${{ env.SENTRY_AUTH_TOKEN }}
|
||||||
|
organization: ${{ env.SENTRY_ORG }}
|
||||||
|
project: ${{ env.SENTRY_PROJECT }}
|
||||||
|
|
||||||
|
- name: Upload debug symbols to Sentry
|
||||||
|
run: |
|
||||||
|
sentry-cli debug-files upload --include-sources .
|
||||||
|
|
||||||
|
- name: Upload build binary artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-binary
|
||||||
|
path: target/release/${{ env.BINARY_NAME }}
|
||||||
|
|
||||||
|
- name: Upload build debug symbols artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-symbols
|
||||||
|
path: target/release/${{ env.BINARY_NAME }}.d
|
||||||
|
|
||||||
|
docker:
|
||||||
|
name: Build & Push Docker Image
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download build artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-binary
|
||||||
|
|
||||||
|
- name: Setup Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3.10.0
|
||||||
|
|
||||||
|
- name: Login to Registry
|
||||||
|
uses: docker/login-action@v3.4.0
|
||||||
|
with:
|
||||||
|
registry: ${{ env.DOCKER_REGISTRY_HOST }}
|
||||||
|
username: ${{ env.DOCKER_REGISTRY_USERNAME }}
|
||||||
|
password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Extract Docker metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5.7.0
|
||||||
|
with:
|
||||||
|
images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
id: build-and-push
|
||||||
|
uses: docker/build-push-action@v6.15.0
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
|
build-args: |
|
||||||
|
"BINARY_NAME=${{ env.BINARY_NAME }}"
|
||||||
|
release:
|
||||||
|
name: Create GitHub Release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- build
|
||||||
|
- docker
|
||||||
|
# noinspection GrazieInspection,SpellCheckingInspection
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Generate changelog
|
||||||
|
run: |
|
||||||
|
LAST_TAG=$(git describe --tags --abbrev=0 HEAD^)
|
||||||
|
echo "## Коммиты с прошлого релиза $LAST_TAG" > CHANGELOG.md
|
||||||
|
git log $LAST_TAG..HEAD --oneline >> CHANGELOG.md
|
||||||
|
|
||||||
|
- name: Download build artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
pattern: release-*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Create Release
|
||||||
|
id: create_release
|
||||||
|
uses: ncipollo/release-action@v1.16.0
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
artifacts: "${{ env.BINARY_NAME }},${{ env.BINARY_NAME }}.d"
|
||||||
|
bodyFile: CHANGELOG.md
|
||||||
21
.github/workflows/test.yml
vendored
21
.github/workflows/test.yml
vendored
@@ -1,10 +1,9 @@
|
|||||||
name: Tests
|
name: cargo test
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ "master" ]
|
branches: [ "development" ]
|
||||||
pull_request:
|
tags-ignore: [ "release/v*" ]
|
||||||
branches: [ "master" ]
|
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
@@ -19,14 +18,20 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Build
|
- name: Build
|
||||||
run: cargo build
|
run: cargo build
|
||||||
- name: Create .env.test
|
|
||||||
run: touch .env.test
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: cargo test
|
run: cargo test
|
||||||
env:
|
env:
|
||||||
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
|
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
|
||||||
|
SCHEDULE_DISABLE_AUTO_UPDATE: 1
|
||||||
JWT_SECRET: "test-secret-at-least-256-bits-used"
|
JWT_SECRET: "test-secret-at-least-256-bits-used"
|
||||||
VKID_CLIENT_ID: 0
|
VK_ID_CLIENT_ID: 0
|
||||||
VKID_REDIRECT_URI: "vk0://vk.com/blank.html"
|
VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
|
||||||
|
TELEGRAM_BOT_ID: 0
|
||||||
|
TELEGRAM_MINI_APP_HOST: example.com
|
||||||
|
TELEGRAM_TEST_DC: false
|
||||||
|
YANDEX_CLOUD_API_KEY: ""
|
||||||
|
YANDEX_CLOUD_FUNC_ID: ""
|
||||||
6
.idea/schedule-parser-rusted.iml
generated
6
.idea/schedule-parser-rusted.iml
generated
@@ -4,9 +4,13 @@
|
|||||||
<content url="file://$MODULE_DIR$">
|
<content url="file://$MODULE_DIR$">
|
||||||
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
|
|
||||||
<sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/benches" isTestSource="true" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/src" isTestSource="false" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/providers/base/src" isTestSource="false" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/providers/provider-engels-polytechnic/src" isTestSource="false" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/providers/src" isTestSource="false" />
|
||||||
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
|
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
|
||||||
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
|
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
|
||||||
<excludeFolder url="file://$MODULE_DIR$/target" />
|
<excludeFolder url="file://$MODULE_DIR$/target" />
|
||||||
|
|||||||
9
.idea/sqldialects.xml
generated
9
.idea/sqldialects.xml
generated
@@ -1,9 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="SqlDialectMappings">
|
|
||||||
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
|
|
||||||
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
|
|
||||||
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
|
|
||||||
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
|
|
||||||
</component>
|
|
||||||
</project>
|
|
||||||
3843
Cargo.lock
generated
3843
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
105
Cargo.toml
105
Cargo.toml
@@ -1,50 +1,81 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = ["actix-macros", "actix-test"]
|
members = ["actix-macros", "actix-test", "providers"]
|
||||||
|
|
||||||
[package]
|
[package]
|
||||||
name = "schedule-parser-rusted"
|
name = "schedule-parser-rusted"
|
||||||
version = "0.8.0"
|
version = "1.3.1"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
publish = false
|
publish = false
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
debug = true
|
||||||
|
|
||||||
|
[features]
|
||||||
|
trace = ["tracing", "console-subscriber"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-web = "4.10.2"
|
providers = { path = "providers" }
|
||||||
actix-macros = { path = "actix-macros" }
|
actix-macros = { path = "actix-macros" }
|
||||||
bcrypt = "0.17.0"
|
|
||||||
calamine = "0.26.1"
|
# serve api
|
||||||
chrono = { version = "0.4.40", features = ["serde"] }
|
actix-web = "4"
|
||||||
derive_more = "2.0.1"
|
|
||||||
diesel = { version = "2.2.8", features = ["postgres"] }
|
# basic
|
||||||
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
|
chrono = { version = "0", features = ["serde"] }
|
||||||
dotenvy = "0.15.7"
|
derive_more = { version = "2", features = ["full"] }
|
||||||
env_logger = "0.11.7"
|
dotenvy = "0"
|
||||||
firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" }
|
|
||||||
futures-util = "0.3.31"
|
# sql
|
||||||
fuzzy-matcher = "0.3.7"
|
database = { path = "database" }
|
||||||
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
|
|
||||||
hex = "0.4.3"
|
# logging
|
||||||
mime = "0.3.17"
|
env_logger = "0"
|
||||||
objectid = "0.2.0"
|
|
||||||
regex = "1.11.1"
|
# async
|
||||||
reqwest = { version = "0.12.15", features = ["json"] }
|
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
|
||||||
sentry = "0.37.0"
|
tokio-util = "0"
|
||||||
sentry-actix = "0.37.0"
|
futures-util = "0"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
|
||||||
serde_json = "1.0.140"
|
# authorization
|
||||||
serde_with = "3.12.0"
|
bcrypt = "0"
|
||||||
serde_repr = "0.1.20"
|
jsonwebtoken = { version = "9", features = ["use_pem"] }
|
||||||
sha1 = "0.11.0-pre.5"
|
|
||||||
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
|
# creating users
|
||||||
rand = "0.9.0"
|
objectid = "0"
|
||||||
|
|
||||||
|
# schedule downloader
|
||||||
|
reqwest = { version = "0", features = ["json"] }
|
||||||
|
mime = "0"
|
||||||
|
|
||||||
|
# error handling
|
||||||
|
sentry = "0"
|
||||||
|
sentry-actix = "0"
|
||||||
|
|
||||||
|
# [de]serializing
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json = "1"
|
||||||
|
serde_with = "3"
|
||||||
|
|
||||||
|
sha1 = "0.11.0-rc.2"
|
||||||
|
|
||||||
|
# documentation
|
||||||
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
|
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
|
||||||
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
|
utoipa-rapidoc = { version = "6", features = ["actix-web"] }
|
||||||
utoipa-actix-web = "0.1"
|
utoipa-actix-web = "0"
|
||||||
uuid = { version = "1.16.0", features = ["v4"] }
|
|
||||||
|
uuid = { version = "1", features = ["v4"] }
|
||||||
|
hex-literal = "1"
|
||||||
|
log = "0"
|
||||||
|
|
||||||
|
# telegram webdata deciding and verify
|
||||||
|
base64 = "0"
|
||||||
|
percent-encoding = "2"
|
||||||
|
ed25519-dalek = "3.0.0-pre.1"
|
||||||
|
|
||||||
|
# development tracing
|
||||||
|
console-subscriber = { version = "0", optional = true }
|
||||||
|
tracing = { version = "0", optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
providers = { path = "providers", features = ["test"] }
|
||||||
actix-test = { path = "actix-test" }
|
actix-test = { path = "actix-test" }
|
||||||
criterion = "0.5.1"
|
|
||||||
|
|
||||||
[[bench]]
|
|
||||||
name = "parse"
|
|
||||||
harness = false
|
|
||||||
14
Dockerfile
Normal file
14
Dockerfile
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
FROM debian:stable-slim
|
||||||
|
LABEL authors="n08i40k"
|
||||||
|
|
||||||
|
ARG BINARY_NAME
|
||||||
|
|
||||||
|
WORKDIR /app/
|
||||||
|
|
||||||
|
RUN apt update && \
|
||||||
|
apt install -y libpq5 ca-certificates openssl
|
||||||
|
|
||||||
|
COPY ./${BINARY_NAME} /bin/main
|
||||||
|
RUN chmod +x /bin/main
|
||||||
|
|
||||||
|
ENTRYPOINT ["main"]
|
||||||
7
actix-macros/Cargo.lock
generated
7
actix-macros/Cargo.lock
generated
@@ -1,7 +0,0 @@
|
|||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
version = 4
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "actix-utility-macros"
|
|
||||||
version = "0.1.0"
|
|
||||||
@@ -4,9 +4,9 @@ version = "0.1.0"
|
|||||||
edition = "2024"
|
edition = "2024"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
syn = "2.0.100"
|
syn = "2.0.106"
|
||||||
quote = "1.0.40"
|
quote = "1.0.40"
|
||||||
proc-macro2 = "1.0.94"
|
proc-macro2 = "1.0.101"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
proc-macro = true
|
proc-macro = true
|
||||||
@@ -6,7 +6,7 @@ mod shared {
|
|||||||
use quote::{ToTokens, quote};
|
use quote::{ToTokens, quote};
|
||||||
use syn::{Attribute, DeriveInput};
|
use syn::{Attribute, DeriveInput};
|
||||||
|
|
||||||
pub fn find_status_code(attrs: &Vec<Attribute>) -> Option<proc_macro2::TokenStream> {
|
pub fn find_status_code(attrs: &[Attribute]) -> Option<proc_macro2::TokenStream> {
|
||||||
attrs
|
attrs
|
||||||
.iter()
|
.iter()
|
||||||
.find_map(|attr| -> Option<proc_macro2::TokenStream> {
|
.find_map(|attr| -> Option<proc_macro2::TokenStream> {
|
||||||
@@ -41,14 +41,12 @@ mod shared {
|
|||||||
|
|
||||||
let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
|
let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
|
||||||
.iter()
|
.iter()
|
||||||
.map(|v| -> Option<proc_macro2::TokenStream> {
|
.filter_map(|v| -> Option<proc_macro2::TokenStream> {
|
||||||
let status_code = find_status_code(&v.attrs)?;
|
let status_code = find_status_code(&v.attrs)?;
|
||||||
let variant_name = &v.ident;
|
let variant_name = &v.ident;
|
||||||
|
|
||||||
Some(quote! { #name::#variant_name => #status_code, })
|
Some(quote! { #name::#variant_name => #status_code, })
|
||||||
})
|
})
|
||||||
.filter(|v| v.is_some())
|
|
||||||
.map(|v| v.unwrap())
|
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
if status_code_arms.len() < variants.len() {
|
if status_code_arms.len() < variants.len() {
|
||||||
@@ -62,7 +60,7 @@ mod shared {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mod response_error_message {
|
mod middleware_error {
|
||||||
use proc_macro::TokenStream;
|
use proc_macro::TokenStream;
|
||||||
use quote::quote;
|
use quote::quote;
|
||||||
|
|
||||||
@@ -81,28 +79,7 @@ mod response_error_message {
|
|||||||
|
|
||||||
fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
|
fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
|
||||||
::actix_web::HttpResponse::build(self.status_code())
|
::actix_web::HttpResponse::build(self.status_code())
|
||||||
.json(crate::utility::error::ResponseErrorMessage::new(self.clone()))
|
.json(crate::middlewares::error::MiddlewareError::new(self.clone()))
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod status_code {
|
|
||||||
use proc_macro::TokenStream;
|
|
||||||
use quote::quote;
|
|
||||||
|
|
||||||
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
|
|
||||||
let name = &ast.ident;
|
|
||||||
|
|
||||||
let status_code_arms = super::shared::get_arms(ast);
|
|
||||||
|
|
||||||
TokenStream::from(quote! {
|
|
||||||
impl crate::routes::schema::PartialStatusCode for #name {
|
|
||||||
fn status_code(&self) -> ::actix_web::http::StatusCode {
|
|
||||||
match self {
|
|
||||||
#(#status_code_arms)*
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -130,7 +107,7 @@ mod responder_json {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mod into_response_error {
|
mod ok_response {
|
||||||
use proc_macro::TokenStream;
|
use proc_macro::TokenStream;
|
||||||
use quote::quote;
|
use quote::quote;
|
||||||
|
|
||||||
@@ -138,46 +115,37 @@ mod into_response_error {
|
|||||||
let name = &ast.ident;
|
let name = &ast.ident;
|
||||||
|
|
||||||
TokenStream::from(quote! {
|
TokenStream::from(quote! {
|
||||||
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
|
impl crate::routes::schema::PartialOkResponse for #name {}
|
||||||
fn into(self) -> crate::routes::schema::ResponseError<#name> {
|
|
||||||
crate::routes::schema::ResponseError {
|
|
||||||
code: self,
|
|
||||||
message: ::core::option::Option::None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
|
|
||||||
where
|
|
||||||
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn fmt_named(ast: &syn::DeriveInput) -> TokenStream {
|
|
||||||
let name = &ast.ident;
|
|
||||||
|
|
||||||
TokenStream::from(quote! {
|
|
||||||
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
|
|
||||||
fn into(self) -> crate::routes::schema::ResponseError<#name> {
|
|
||||||
crate::routes::schema::ResponseError {
|
|
||||||
message: ::core::option::Option::Some(format!("{}", self)),
|
|
||||||
code: self,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
|
|
||||||
where
|
|
||||||
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[proc_macro_derive(ResponseErrorMessage, attributes(status_code))]
|
mod err_response {
|
||||||
pub fn rem_derive(input: TokenStream) -> TokenStream {
|
use proc_macro::TokenStream;
|
||||||
|
use quote::quote;
|
||||||
|
|
||||||
|
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
|
||||||
|
let name = &ast.ident;
|
||||||
|
|
||||||
|
let status_code_arms = super::shared::get_arms(ast);
|
||||||
|
|
||||||
|
TokenStream::from(quote! {
|
||||||
|
impl crate::routes::schema::PartialErrResponse for #name {
|
||||||
|
fn status_code(&self) -> ::actix_web::http::StatusCode {
|
||||||
|
match self {
|
||||||
|
#(#status_code_arms)*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[proc_macro_derive(MiddlewareError, attributes(status_code))]
|
||||||
|
pub fn moddleware_error_derive(input: TokenStream) -> TokenStream {
|
||||||
let ast = syn::parse(input).unwrap();
|
let ast = syn::parse(input).unwrap();
|
||||||
|
|
||||||
response_error_message::fmt(&ast)
|
middleware_error::fmt(&ast)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[proc_macro_derive(ResponderJson)]
|
#[proc_macro_derive(ResponderJson)]
|
||||||
@@ -187,23 +155,16 @@ pub fn responser_json_derive(input: TokenStream) -> TokenStream {
|
|||||||
responder_json::fmt(&ast)
|
responder_json::fmt(&ast)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[proc_macro_derive(IntoResponseError)]
|
#[proc_macro_derive(OkResponse)]
|
||||||
pub fn into_response_error_derive(input: TokenStream) -> TokenStream {
|
pub fn ok_response_derive(input: TokenStream) -> TokenStream {
|
||||||
let ast = syn::parse(input).unwrap();
|
let ast = syn::parse(input).unwrap();
|
||||||
|
|
||||||
into_response_error::fmt(&ast)
|
ok_response::fmt(&ast)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[proc_macro_derive(IntoResponseErrorNamed)]
|
#[proc_macro_derive(ErrResponse, attributes(status_code))]
|
||||||
pub fn into_response_error_named_derive(input: TokenStream) -> TokenStream {
|
pub fn err_response_derive(input: TokenStream) -> TokenStream {
|
||||||
let ast = syn::parse(input).unwrap();
|
let ast = syn::parse(input).unwrap();
|
||||||
|
|
||||||
into_response_error::fmt_named(&ast)
|
err_response::fmt(&ast)
|
||||||
}
|
|
||||||
|
|
||||||
#[proc_macro_derive(StatusCode, attributes(status_code))]
|
|
||||||
pub fn status_code_derive(input: TokenStream) -> TokenStream {
|
|
||||||
let ast = syn::parse(input).unwrap();
|
|
||||||
|
|
||||||
status_code::fmt(&ast)
|
|
||||||
}
|
}
|
||||||
|
|||||||
1520
actix-test/Cargo.lock
generated
1520
actix-test/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -4,5 +4,5 @@ version = "0.1.0"
|
|||||||
edition = "2024"
|
edition = "2024"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-http = "3.10.0"
|
actix-http = "3.11.1"
|
||||||
actix-web = "4.10.2"
|
actix-web = "4.11.0"
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
|
||||||
|
|
||||||
use schedule_parser_rusted::parser::parse_xls;
|
|
||||||
|
|
||||||
pub fn bench_parse_xls(c: &mut Criterion) {
|
|
||||||
let buffer: Vec<u8> = include_bytes!("../schedule.xls").to_vec();
|
|
||||||
|
|
||||||
c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(benches, bench_parse_xls);
|
|
||||||
criterion_main!(benches);
|
|
||||||
12
database/Cargo.toml
Normal file
12
database/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[package]
|
||||||
|
name = "database"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
migration = { path = "migration" }
|
||||||
|
entity = { path = "entity" }
|
||||||
|
sea-orm = { version = "2.0.0-rc.15", features = ["sqlx-postgres", "runtime-tokio"] }
|
||||||
|
|
||||||
|
paste = "1"
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
1
database/entity/.gitignore
vendored
Normal file
1
database/entity/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/target
|
||||||
9
database/entity/Cargo.toml
Normal file
9
database/entity/Cargo.toml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
[package]
|
||||||
|
name = "entity"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
sea-orm = "2.0.0-rc.6"
|
||||||
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
utoipa = "5.4.0"
|
||||||
7
database/entity/src/lib.rs
Normal file
7
database/entity/src/lib.rs
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
|
||||||
|
|
||||||
|
pub mod prelude;
|
||||||
|
|
||||||
|
pub mod sea_orm_active_enums;
|
||||||
|
pub mod service_user;
|
||||||
|
pub mod user;
|
||||||
4
database/entity/src/prelude.rs
Normal file
4
database/entity/src/prelude.rs
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
|
||||||
|
|
||||||
|
pub use super::service_user::Entity as ServiceUser;
|
||||||
|
pub use super::user::Entity as User;
|
||||||
25
database/entity/src/sea_orm_active_enums.rs
Normal file
25
database/entity/src/sea_orm_active_enums.rs
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
|
||||||
|
|
||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
|
||||||
|
#[derive(
|
||||||
|
Debug,
|
||||||
|
Clone,
|
||||||
|
PartialEq,
|
||||||
|
Eq,
|
||||||
|
EnumIter,
|
||||||
|
DeriveActiveEnum,
|
||||||
|
:: serde :: Serialize,
|
||||||
|
:: serde :: Deserialize,
|
||||||
|
:: utoipa :: ToSchema,
|
||||||
|
)]
|
||||||
|
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "user_role")]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
pub enum UserRole {
|
||||||
|
#[sea_orm(string_value = "student")]
|
||||||
|
Student,
|
||||||
|
#[sea_orm(string_value = "teacher")]
|
||||||
|
Teacher,
|
||||||
|
#[sea_orm(string_value = "admin")]
|
||||||
|
Admin,
|
||||||
|
}
|
||||||
16
database/entity/src/service_user.rs
Normal file
16
database/entity/src/service_user.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
|
||||||
|
|
||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||||
|
#[sea_orm(table_name = "service_user")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key, auto_increment = false)]
|
||||||
|
pub id: String,
|
||||||
|
pub name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {}
|
||||||
25
database/entity/src/user.rs
Normal file
25
database/entity/src/user.rs
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
|
||||||
|
|
||||||
|
use super::sea_orm_active_enums::UserRole;
|
||||||
|
use sea_orm::entity::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||||
|
#[sea_orm(table_name = "user")]
|
||||||
|
pub struct Model {
|
||||||
|
#[sea_orm(primary_key, auto_increment = false)]
|
||||||
|
pub id: String,
|
||||||
|
#[sea_orm(unique)]
|
||||||
|
pub username: String,
|
||||||
|
pub password: Option<String>,
|
||||||
|
pub vk_id: Option<i32>,
|
||||||
|
pub group: Option<String>,
|
||||||
|
pub role: UserRole,
|
||||||
|
pub android_version: Option<String>,
|
||||||
|
#[sea_orm(unique)]
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||||
|
pub enum Relation {}
|
||||||
|
|
||||||
|
impl ActiveModelBehavior for ActiveModel {}
|
||||||
1
database/migration/.gitignore
vendored
Normal file
1
database/migration/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/target
|
||||||
22
database/migration/Cargo.toml
Normal file
22
database/migration/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
[package]
|
||||||
|
name = "migration"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
publish = false
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "migration"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
async-std = { version = "1", features = ["attributes", "tokio1"] }
|
||||||
|
|
||||||
|
[dependencies.sea-orm-migration]
|
||||||
|
version = "2.0.0-rc.15"
|
||||||
|
features = [
|
||||||
|
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
|
||||||
|
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
|
||||||
|
# e.g.
|
||||||
|
"runtime-tokio", # `ASYNC_RUNTIME` feature
|
||||||
|
"sqlx-postgres", # `DATABASE_DRIVER` feature
|
||||||
|
]
|
||||||
18
database/migration/src/lib.rs
Normal file
18
database/migration/src/lib.rs
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
pub use sea_orm_migration::prelude::MigratorTrait;
|
||||||
|
|
||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
mod m20250904_024854_init;
|
||||||
|
mod m20251027_230335_add_service_users;
|
||||||
|
|
||||||
|
pub struct Migrator;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigratorTrait for Migrator {
|
||||||
|
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
|
||||||
|
vec![
|
||||||
|
Box::new(m20250904_024854_init::Migration),
|
||||||
|
Box::new(m20251027_230335_add_service_users::Migration),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
70
database/migration/src/m20250904_024854_init.rs
Normal file
70
database/migration/src/m20250904_024854_init.rs
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
use sea_orm_migration::prelude::extension::postgres::Type;
|
||||||
|
use sea_orm_migration::sea_orm::{EnumIter, Iterable};
|
||||||
|
use sea_orm_migration::{prelude::*, schema::*};
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_type(
|
||||||
|
Type::create()
|
||||||
|
.as_enum(UserRole)
|
||||||
|
.values(UserRoleVariants::iter())
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(User::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(string_uniq(User::Id).primary_key().not_null())
|
||||||
|
.col(string_uniq(User::Username).not_null())
|
||||||
|
.col(string_null(User::Password))
|
||||||
|
.col(integer_null(User::VkId))
|
||||||
|
.col(string_null(User::Group))
|
||||||
|
.col(enumeration(User::Role, UserRole, UserRoleVariants::iter()))
|
||||||
|
.col(string_null(User::AndroidVersion))
|
||||||
|
.col(big_integer_null(User::TelegramId).unique_key())
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(User::Table).to_owned())
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
manager
|
||||||
|
.drop_type(Type::drop().name(UserRole).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
struct UserRole;
|
||||||
|
|
||||||
|
#[derive(DeriveIden, EnumIter)]
|
||||||
|
enum UserRoleVariants {
|
||||||
|
Student,
|
||||||
|
Teacher,
|
||||||
|
Admin,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum User {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
Username,
|
||||||
|
Password,
|
||||||
|
VkId,
|
||||||
|
Group,
|
||||||
|
Role,
|
||||||
|
AndroidVersion,
|
||||||
|
TelegramId,
|
||||||
|
}
|
||||||
33
database/migration/src/m20251027_230335_add_service_users.rs
Normal file
33
database/migration/src/m20251027_230335_add_service_users.rs
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
use sea_orm_migration::{prelude::*, schema::*};
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(ServiceUser::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(string_uniq(ServiceUser::Id).primary_key().not_null())
|
||||||
|
.col(string(ServiceUser::Name))
|
||||||
|
.to_owned(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
manager
|
||||||
|
.drop_table(Table::drop().table(ServiceUser::Table).to_owned())
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum ServiceUser {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
Name,
|
||||||
|
}
|
||||||
6
database/migration/src/main.rs
Normal file
6
database/migration/src/main.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[async_std::main]
|
||||||
|
async fn main() {
|
||||||
|
cli::run_cli(migration::Migrator).await;
|
||||||
|
}
|
||||||
31
database/src/lib.rs
Normal file
31
database/src/lib.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
pub mod query;
|
||||||
|
|
||||||
|
pub use migration;
|
||||||
|
pub use sea_orm;
|
||||||
|
|
||||||
|
pub mod entity {
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
pub use entity::*;
|
||||||
|
|
||||||
|
pub use entity::user::{
|
||||||
|
ActiveModel as ActiveUser, //
|
||||||
|
Column as UserColumn, //
|
||||||
|
Entity as UserEntity, //
|
||||||
|
Model as User, //
|
||||||
|
};
|
||||||
|
|
||||||
|
pub use entity::service_user::{
|
||||||
|
ActiveModel as ActiveServiceUser, //
|
||||||
|
Column as ServiceUserColumn, //
|
||||||
|
Entity as ServiceUserEntity, //
|
||||||
|
Model as ServiceUser, //
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
pub enum UserType {
|
||||||
|
Default,
|
||||||
|
Service,
|
||||||
|
}
|
||||||
|
}
|
||||||
73
database/src/query.rs
Normal file
73
database/src/query.rs
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
use paste::paste;
|
||||||
|
use sea_orm::ColumnTrait;
|
||||||
|
use sea_orm::EntityTrait;
|
||||||
|
use sea_orm::QueryFilter;
|
||||||
|
|
||||||
|
pub struct Query;
|
||||||
|
|
||||||
|
macro_rules! ref_type {
|
||||||
|
(String) => {
|
||||||
|
&String
|
||||||
|
};
|
||||||
|
(str) => {
|
||||||
|
&str
|
||||||
|
};
|
||||||
|
($other:ty) => {
|
||||||
|
$other
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! define_is_exists {
|
||||||
|
($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
|
||||||
|
paste! {
|
||||||
|
pub async fn [<is_ $entity _exists_by_ $by>](
|
||||||
|
db: &::sea_orm::DbConn,
|
||||||
|
$by: ref_type!($by_type)
|
||||||
|
) -> Result<bool, ::sea_orm::DbErr> {
|
||||||
|
::entity::$entity::Entity::find()
|
||||||
|
.filter(::entity::$entity::Column::$by_column.eq($by))
|
||||||
|
.one(db)
|
||||||
|
.await
|
||||||
|
.map(|x| x.is_some())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! define_find_by {
|
||||||
|
($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
|
||||||
|
paste! {
|
||||||
|
pub async fn [<find_ $entity _by_ $by>](
|
||||||
|
db: &::sea_orm::DbConn,
|
||||||
|
$by: ref_type!($by_type)
|
||||||
|
) -> Result<Option<::entity::$entity::Model>, ::sea_orm::DbErr> {
|
||||||
|
::entity::$entity::Entity::find()
|
||||||
|
.filter(::entity::$entity::Column::$by_column.eq($by))
|
||||||
|
.one(db)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Query {
|
||||||
|
// User
|
||||||
|
|
||||||
|
define_find_by!(user, id, str, Id);
|
||||||
|
define_find_by!(user, telegram_id, i64, TelegramId);
|
||||||
|
define_find_by!(user, vk_id, i32, VkId);
|
||||||
|
define_find_by!(user, username, str, Username);
|
||||||
|
|
||||||
|
define_is_exists!(user, id, str, Id);
|
||||||
|
define_is_exists!(user, username, str, Username);
|
||||||
|
define_is_exists!(user, telegram_id, i64, TelegramId);
|
||||||
|
define_is_exists!(user, vk_id, i32, VkId);
|
||||||
|
|
||||||
|
// Service user
|
||||||
|
|
||||||
|
define_find_by!(service_user, id, str, Id);
|
||||||
|
define_find_by!(service_user, name, str, Name);
|
||||||
|
|
||||||
|
define_is_exists!(service_user, id, str, Id);
|
||||||
|
define_is_exists!(service_user, name, str, Name);
|
||||||
|
}
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# For documentation on how to configure this file,
|
|
||||||
# see https://diesel.rs/guides/configuring-diesel-cli
|
|
||||||
|
|
||||||
[print_schema]
|
|
||||||
file = "src/database/schema.rs"
|
|
||||||
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
|
|
||||||
|
|
||||||
[migrations_directory]
|
|
||||||
dir = "./migrations"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
-- This file was automatically created by Diesel to set up helper functions
|
|
||||||
-- and other internal bookkeeping. This file is safe to edit, any future
|
|
||||||
-- changes will be added to existing projects as new migrations.
|
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
|
|
||||||
DROP FUNCTION IF EXISTS diesel_set_updated_at();
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
-- This file was automatically created by Diesel to set up helper functions
|
|
||||||
-- and other internal bookkeeping. This file is safe to edit, any future
|
|
||||||
-- changes will be added to existing projects as new migrations.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
-- Sets up a trigger for the given table to automatically set a column called
|
|
||||||
-- `updated_at` whenever the row is modified (unless `updated_at` was included
|
|
||||||
-- in the modified columns)
|
|
||||||
--
|
|
||||||
-- # Example
|
|
||||||
--
|
|
||||||
-- ```sql
|
|
||||||
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
|
|
||||||
--
|
|
||||||
-- SELECT diesel_manage_updated_at('users');
|
|
||||||
-- ```
|
|
||||||
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
|
|
||||||
BEGIN
|
|
||||||
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
|
|
||||||
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
|
|
||||||
BEGIN
|
|
||||||
IF (
|
|
||||||
NEW IS DISTINCT FROM OLD AND
|
|
||||||
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
|
|
||||||
) THEN
|
|
||||||
NEW.updated_at := current_timestamp;
|
|
||||||
END IF;
|
|
||||||
RETURN NEW;
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
DROP TYPE user_role;
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
CREATE TYPE user_role AS ENUM (
|
|
||||||
'STUDENT',
|
|
||||||
'TEACHER',
|
|
||||||
'ADMIN');
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
DROP TABLE users;
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
CREATE TABLE users
|
|
||||||
(
|
|
||||||
id text PRIMARY KEY NOT NULL,
|
|
||||||
username text UNIQUE NOT NULL,
|
|
||||||
password text NOT NULL,
|
|
||||||
vk_id int4 NULL,
|
|
||||||
access_token text UNIQUE NOT NULL,
|
|
||||||
"group" text NOT NULL,
|
|
||||||
role user_role NOT NULL,
|
|
||||||
version text NOT NULL
|
|
||||||
);
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
DROP TABLE fcm;
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
CREATE TABLE fcm
|
|
||||||
(
|
|
||||||
user_id text PRIMARY KEY NOT NULL REFERENCES users (id),
|
|
||||||
token text NOT NULL,
|
|
||||||
topics text[] NOT NULL CHECK ( array_position(topics, null) is null )
|
|
||||||
);
|
|
||||||
12
providers/Cargo.toml
Normal file
12
providers/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[package]
|
||||||
|
name = "providers"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
test = ["provider-engels-polytechnic/test"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
base = { path = "base" }
|
||||||
|
|
||||||
|
provider-engels-polytechnic = { path = "provider-engels-polytechnic" }
|
||||||
17
providers/base/Cargo.toml
Normal file
17
providers/base/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
[package]
|
||||||
|
name = "base"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
tokio-util = "0.7.16"
|
||||||
|
async-trait = "0.1.89"
|
||||||
|
|
||||||
|
chrono = { version = "0.4.41", features = ["serde"] }
|
||||||
|
|
||||||
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
|
serde_repr = "0.1.20"
|
||||||
|
|
||||||
|
utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
|
||||||
|
|
||||||
|
sha1 = "0.11.0-rc.2"
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
use sha1::Digest;
|
use sha1::Digest;
|
||||||
|
use sha1::digest::OutputSizeUser;
|
||||||
|
use sha1::digest::typenum::Unsigned;
|
||||||
use std::hash::Hasher;
|
use std::hash::Hasher;
|
||||||
|
|
||||||
/// Hesher returning hash from the algorithm implementing Digest
|
/// Hesher returning hash from the algorithm implementing Digest
|
||||||
@@ -12,7 +14,20 @@ where
|
|||||||
{
|
{
|
||||||
/// Obtain hash.
|
/// Obtain hash.
|
||||||
pub fn finalize(self) -> String {
|
pub fn finalize(self) -> String {
|
||||||
hex::encode(self.digest.finalize().0)
|
static ALPHABET: [char; 16] = [
|
||||||
|
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F',
|
||||||
|
];
|
||||||
|
|
||||||
|
let mut hex = String::with_capacity(<D as OutputSizeUser>::OutputSize::USIZE * 2);
|
||||||
|
|
||||||
|
for byte in self.digest.finalize().0.into_iter() {
|
||||||
|
let byte: u8 = byte;
|
||||||
|
|
||||||
|
hex.push(ALPHABET[(byte >> 4) as usize]);
|
||||||
|
hex.push(ALPHABET[(byte & 0xF) as usize]);
|
||||||
|
}
|
||||||
|
|
||||||
|
hex
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
231
providers/base/src/lib.rs
Normal file
231
providers/base/src/lib.rs
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
use crate::hasher::DigestHasher;
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_repr::{Deserialize_repr, Serialize_repr};
|
||||||
|
use sha1::{Digest, Sha1};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::hash::Hash;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio_util::sync::CancellationToken;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
mod hasher;
|
||||||
|
|
||||||
|
// pub(crate) mod internal {
|
||||||
|
// use super::{LessonBoundaries, LessonType};
|
||||||
|
// use chrono::{DateTime, Utc};
|
||||||
|
//
|
||||||
|
// /// Data cell storing the group name.
|
||||||
|
// pub struct GroupCellInfo {
|
||||||
|
// /// Column index.
|
||||||
|
// pub column: u32,
|
||||||
|
//
|
||||||
|
// /// Text in the cell.
|
||||||
|
// pub name: String,
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// /// Data cell storing the line.
|
||||||
|
// pub struct DayCellInfo {
|
||||||
|
// /// Line index.
|
||||||
|
// pub row: u32,
|
||||||
|
//
|
||||||
|
// /// Column index.
|
||||||
|
// pub column: u32,
|
||||||
|
//
|
||||||
|
// /// Day name.
|
||||||
|
// pub name: String,
|
||||||
|
//
|
||||||
|
// /// Date of the day.
|
||||||
|
// pub date: DateTime<Utc>,
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// /// Data on the time of lessons from the second column of the schedule.
|
||||||
|
// pub struct BoundariesCellInfo {
|
||||||
|
// /// Temporary segment of the lesson.
|
||||||
|
// pub time_range: LessonBoundaries,
|
||||||
|
//
|
||||||
|
// /// Type of lesson.
|
||||||
|
// pub lesson_type: LessonType,
|
||||||
|
//
|
||||||
|
// /// The lesson index.
|
||||||
|
// pub default_index: Option<u32>,
|
||||||
|
//
|
||||||
|
// /// The frame of the cell.
|
||||||
|
// pub xls_range: ((u32, u32), (u32, u32)),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
/// The beginning and end of the lesson.
|
||||||
|
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||||
|
pub struct LessonBoundaries {
|
||||||
|
/// The beginning of a lesson.
|
||||||
|
pub start: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// The end of the lesson.
|
||||||
|
pub end: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Type of lesson.
|
||||||
|
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
#[repr(u8)]
|
||||||
|
pub enum LessonType {
|
||||||
|
/// Обычная.
|
||||||
|
Default = 0,
|
||||||
|
|
||||||
|
/// Допы.
|
||||||
|
Additional,
|
||||||
|
|
||||||
|
/// Перемена.
|
||||||
|
Break,
|
||||||
|
|
||||||
|
/// Консультация.
|
||||||
|
Consultation,
|
||||||
|
|
||||||
|
/// Самостоятельная работа.
|
||||||
|
IndependentWork,
|
||||||
|
|
||||||
|
/// Зачёт.
|
||||||
|
Exam,
|
||||||
|
|
||||||
|
/// Зачёт с оценкой.
|
||||||
|
ExamWithGrade,
|
||||||
|
|
||||||
|
/// Экзамен.
|
||||||
|
ExamDefault,
|
||||||
|
|
||||||
|
/// Курсовой проект.
|
||||||
|
CourseProject,
|
||||||
|
|
||||||
|
/// Защита курсового проекта.
|
||||||
|
CourseProjectDefense,
|
||||||
|
|
||||||
|
/// Практическое занятие.
|
||||||
|
Practice,
|
||||||
|
|
||||||
|
/// Дифференцированный зачёт.
|
||||||
|
DifferentiatedExam,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||||
|
pub struct LessonSubGroup {
|
||||||
|
/// Cabinet, if present.
|
||||||
|
pub cabinet: Option<String>,
|
||||||
|
|
||||||
|
/// Full name of the teacher.
|
||||||
|
pub teacher: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct Lesson {
|
||||||
|
/// Type.
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
pub lesson_type: LessonType,
|
||||||
|
|
||||||
|
/// Lesson indexes, if present.
|
||||||
|
pub range: Option<[u8; 2]>,
|
||||||
|
|
||||||
|
/// Name.
|
||||||
|
pub name: Option<String>,
|
||||||
|
|
||||||
|
/// The beginning and end.
|
||||||
|
pub time: LessonBoundaries,
|
||||||
|
|
||||||
|
/// List of subgroups.
|
||||||
|
#[serde(rename = "subgroups")]
|
||||||
|
pub subgroups: Option<Vec<Option<LessonSubGroup>>>,
|
||||||
|
|
||||||
|
/// Group name, if this is a schedule for teachers.
|
||||||
|
pub group: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||||
|
pub struct Day {
|
||||||
|
/// Day of the week.
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// Address of another corps.
|
||||||
|
pub street: Option<String>,
|
||||||
|
|
||||||
|
/// Date.
|
||||||
|
pub date: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// List of lessons on this day.
|
||||||
|
pub lessons: Vec<Lesson>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||||
|
pub struct ScheduleEntry {
|
||||||
|
/// The name of the group or name of the teacher.
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
/// List of six days.
|
||||||
|
pub days: Vec<Day>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ParsedSchedule {
|
||||||
|
/// List of groups.
|
||||||
|
pub groups: HashMap<String, ScheduleEntry>,
|
||||||
|
|
||||||
|
/// List of teachers.
|
||||||
|
pub teachers: HashMap<String, ScheduleEntry>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents a snapshot of the schedule parsed from an XLS file.
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ScheduleSnapshot {
|
||||||
|
/// Timestamp when the Polytechnic website was queried for the schedule.
|
||||||
|
pub fetched_at: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// Timestamp indicating when the schedule was last updated on the Polytechnic website.
|
||||||
|
///
|
||||||
|
/// <note>
|
||||||
|
/// This value is determined by the website's content and does not depend on the application.
|
||||||
|
/// </note>
|
||||||
|
pub updated_at: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// URL pointing to the XLS file containing the source schedule data.
|
||||||
|
pub url: String,
|
||||||
|
|
||||||
|
/// Parsed schedule data in the application's internal representation.
|
||||||
|
pub data: ParsedSchedule,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScheduleSnapshot {
|
||||||
|
/// Converting the schedule data into a hash.
|
||||||
|
/// ### Important!
|
||||||
|
/// The hash does not depend on the dates.
|
||||||
|
/// If the application is restarted, but the file with source schedule will remain unchanged, then the hash will not change.
|
||||||
|
pub fn hash(&self) -> String {
|
||||||
|
let mut hasher = DigestHasher::from(Sha1::new());
|
||||||
|
|
||||||
|
self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
|
||||||
|
self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
|
||||||
|
|
||||||
|
hasher.finalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
|
||||||
|
/// Used for auto-updates.
|
||||||
|
pub fn update(&mut self) {
|
||||||
|
self.fetched_at = Utc::now();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
pub trait ScheduleProvider
|
||||||
|
where
|
||||||
|
Self: Sync + Send,
|
||||||
|
{
|
||||||
|
/// Returns ok when task has been canceled.
|
||||||
|
/// Returns err when error appeared while trying to parse or download schedule
|
||||||
|
async fn start_auto_update_task(
|
||||||
|
&self,
|
||||||
|
cancellation_token: CancellationToken,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>>;
|
||||||
|
|
||||||
|
async fn get_schedule(&self) -> Arc<ScheduleSnapshot>;
|
||||||
|
}
|
||||||
31
providers/provider-engels-polytechnic/Cargo.toml
Normal file
31
providers/provider-engels-polytechnic/Cargo.toml
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
[package]
|
||||||
|
name = "provider-engels-polytechnic"
|
||||||
|
version = "0.2.3"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
test = []
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
base = { path = "../base" }
|
||||||
|
|
||||||
|
tokio = { version = "1", features = ["sync", "macros", "time"] }
|
||||||
|
tokio-util = "0"
|
||||||
|
|
||||||
|
chrono = { version = "0", features = ["serde"] }
|
||||||
|
|
||||||
|
derive_more = { version = "2", features = ["error", "display", "from"] }
|
||||||
|
|
||||||
|
utoipa = { version = "5", features = ["macros", "chrono"] }
|
||||||
|
|
||||||
|
calamine = "0"
|
||||||
|
async-trait = "0"
|
||||||
|
|
||||||
|
reqwest = "0"
|
||||||
|
ua_generator = "0"
|
||||||
|
regex = "1"
|
||||||
|
strsim = "0"
|
||||||
|
log = "0"
|
||||||
|
sentry = "0"
|
||||||
|
fancy-regex = "0"
|
||||||
|
|
||||||
84
providers/provider-engels-polytechnic/src/lib.rs
Normal file
84
providers/provider-engels-polytechnic/src/lib.rs
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
pub use crate::updater::{UpdateSource, Updater};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use base::{ScheduleProvider, ScheduleSnapshot};
|
||||||
|
use std::ops::DerefMut;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::time::Duration;
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
use tokio::time::interval;
|
||||||
|
use tokio_util::sync::CancellationToken;
|
||||||
|
|
||||||
|
mod parser;
|
||||||
|
mod updater;
|
||||||
|
mod xls_downloader;
|
||||||
|
|
||||||
|
#[cfg(feature = "test")]
|
||||||
|
pub mod test_utils {
|
||||||
|
pub use crate::parser::test_utils::test_result;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct EngelsPolytechnicProvider {
|
||||||
|
updater: Updater,
|
||||||
|
snapshot: Arc<ScheduleSnapshot>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EngelsPolytechnicProvider {
|
||||||
|
pub async fn get(
|
||||||
|
update_source: UpdateSource,
|
||||||
|
) -> Result<Arc<dyn ScheduleProvider>, crate::updater::Error> {
|
||||||
|
let (updater, snapshot) = Updater::new(update_source).await?;
|
||||||
|
|
||||||
|
Ok(Arc::new(Wrapper {
|
||||||
|
inner: RwLock::new(Self {
|
||||||
|
updater,
|
||||||
|
snapshot: Arc::new(snapshot),
|
||||||
|
}),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Wrapper {
|
||||||
|
inner: RwLock<EngelsPolytechnicProvider>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl ScheduleProvider for Wrapper {
|
||||||
|
async fn start_auto_update_task(
|
||||||
|
&self,
|
||||||
|
cancellation_token: CancellationToken,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
|
||||||
|
let mut ticker = interval(Duration::from_secs(60 * 30));
|
||||||
|
ticker.tick().await; // bc we already have the latest schedule, when instantiating provider
|
||||||
|
|
||||||
|
loop {
|
||||||
|
tokio::select! {
|
||||||
|
_ = ticker.tick() => {
|
||||||
|
let mut lock = self.inner.write().await;
|
||||||
|
let this= lock.deref_mut();
|
||||||
|
|
||||||
|
log::info!("Updating schedule...");
|
||||||
|
|
||||||
|
match this.updater.update(&this.snapshot).await {
|
||||||
|
Ok(snapshot) => {
|
||||||
|
this.snapshot = Arc::new(snapshot);
|
||||||
|
},
|
||||||
|
|
||||||
|
Err(updater::Error::EmptyUri) => {},
|
||||||
|
|
||||||
|
Err(err) => {
|
||||||
|
sentry::capture_error(&err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = cancellation_token.cancelled() => {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_schedule(&self) -> Arc<ScheduleSnapshot> {
|
||||||
|
self.inner.read().await.snapshot.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
25
providers/provider-engels-polytechnic/src/parser/error.rs
Normal file
25
providers/provider-engels-polytechnic/src/parser/error.rs
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
use crate::parser::worksheet::CellPos;
|
||||||
|
use derive_more::{Display, Error, From};
|
||||||
|
|
||||||
|
#[derive(Debug, Display, Error, From)]
|
||||||
|
pub enum Error {
|
||||||
|
#[from]
|
||||||
|
BadXls(calamine::XlsError),
|
||||||
|
|
||||||
|
#[display("No work sheets found.")]
|
||||||
|
NoWorkSheets,
|
||||||
|
|
||||||
|
#[display("There is no data on work sheet boundaries.")]
|
||||||
|
UnknownWorkSheetRange,
|
||||||
|
|
||||||
|
#[display("Failed to read lesson start and end of lesson at {_0}.")]
|
||||||
|
NoLessonBoundaries(CellPos),
|
||||||
|
|
||||||
|
#[display("No start and end times matching the lesson (at {_0}) was found.")]
|
||||||
|
LessonTimeNotFound(CellPos),
|
||||||
|
|
||||||
|
#[display("Unknown lesson type `{type}` at {pos}")]
|
||||||
|
UnknownLessonType { pos: CellPos, r#type: String },
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type Result<T> = core::result::Result<T, Error>;
|
||||||
21
providers/provider-engels-polytechnic/src/parser/macros.rs
Normal file
21
providers/provider-engels-polytechnic/src/parser/macros.rs
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#[macro_export]
|
||||||
|
macro_rules! or_continue {
|
||||||
|
( $e:expr ) => {{
|
||||||
|
if let Some(x) = $e {
|
||||||
|
x
|
||||||
|
} else {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! or_break {
|
||||||
|
( $e:expr ) => {{
|
||||||
|
if let Some(x) = $e {
|
||||||
|
x
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
859
providers/provider-engels-polytechnic/src/parser/mod.rs
Normal file
859
providers/provider-engels-polytechnic/src/parser/mod.rs
Normal file
@@ -0,0 +1,859 @@
|
|||||||
|
pub use self::error::{Error, Result};
|
||||||
|
use crate::or_continue;
|
||||||
|
use crate::parser::worksheet::{CellPos, CellRange, WorkSheet};
|
||||||
|
use crate::parser::LessonParseResult::{Lessons, Street};
|
||||||
|
use base::LessonType::Break;
|
||||||
|
use base::{
|
||||||
|
Day, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParsedSchedule, ScheduleEntry,
|
||||||
|
};
|
||||||
|
use calamine::{open_workbook_from_rs, Reader, Xls};
|
||||||
|
use chrono::{DateTime, Duration, NaiveDate, NaiveTime, Utc};
|
||||||
|
use regex::Regex;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::io::Cursor;
|
||||||
|
use std::sync::LazyLock;
|
||||||
|
|
||||||
|
mod error;
|
||||||
|
mod macros;
|
||||||
|
mod worksheet;
|
||||||
|
|
||||||
|
/// Data cell storing the group name in the worksheet's header row.
pub struct GroupMarkup {
    /// Zero-based column index of the group's column.
    pub column: u32,

    /// Text in the cell (group name; spaces are stripped during markup parsing).
    pub name: String,
}
|
||||||
|
|
||||||
|
/// Data cell storing a day header row.
pub struct DayMarkup {
    /// Zero-based row index of the day header.
    pub row: u32,

    /// Zero-based column index of the day header.
    pub column: u32,

    /// Day name (the text before the date in the header cell).
    pub name: String,

    /// Date of the day (midnight, UTC).
    pub date: DateTime<Utc>,
}
|
||||||
|
|
||||||
|
/// Positions of the day and group header cells that shape a worksheet.
pub struct WorkSheetMarkup {
    /// Markup of every day block, in document order.
    days: Box<[DayMarkup]>,

    /// Markup of every group column.
    groups: Box<[GroupMarkup]>,
}
|
||||||
|
|
||||||
|
/// Data on the time of lessons from the second column of the schedule.
pub struct BoundariesData {
    /// Temporal segment (start/end timestamps) of the lesson slot.
    pub time_range: LessonBoundaries,

    /// Type of lesson (`Default` for numbered lessons, `Additional` otherwise).
    pub lesson_type: LessonType,

    /// The lesson index parsed from the cell; `None` for additional slots.
    pub default_index: Option<u32>,

    /// The (merge-aware) cell range the boundaries were read from.
    pub range: CellRange,
}
|
||||||
|
|
||||||
|
/// Obtaining a "skeleton" schedule from the working sheet.
|
||||||
|
fn parse_markup(worksheet: &WorkSheet) -> Result<WorkSheetMarkup> {
|
||||||
|
struct PartialDayMarkup {
|
||||||
|
row: u32,
|
||||||
|
name: String,
|
||||||
|
date: Option<DateTime<Utc>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut groups: Vec<GroupMarkup> = Vec::new();
|
||||||
|
let mut days: Vec<PartialDayMarkup> = Vec::new();
|
||||||
|
|
||||||
|
let (start_row, start_col) = worksheet.start().ok_or(Error::UnknownWorkSheetRange)?;
|
||||||
|
let (end_row, end_col) = worksheet.end().ok_or(Error::UnknownWorkSheetRange)?;
|
||||||
|
|
||||||
|
let mut row = start_row;
|
||||||
|
|
||||||
|
while row < end_row {
|
||||||
|
row += 1;
|
||||||
|
|
||||||
|
let day_full_name = or_continue!(worksheet.get_string_from_cell(row, 0));
|
||||||
|
|
||||||
|
// parse groups row when days column will found
|
||||||
|
if groups.is_empty() {
|
||||||
|
// переход на предыдущую строку
|
||||||
|
row -= 1;
|
||||||
|
|
||||||
|
for column in (start_col + 2)..=end_col {
|
||||||
|
groups.push(GroupMarkup {
|
||||||
|
column,
|
||||||
|
name: or_continue!(worksheet.get_string_from_cell(row, column))
|
||||||
|
.replace(" ", ""),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// возврат на текущую строку
|
||||||
|
row += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let (day_name, day_date) = {
|
||||||
|
let space_index = match day_full_name.find(' ') {
|
||||||
|
Some(index) => {
|
||||||
|
if index < 10 {
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
index
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => break,
|
||||||
|
};
|
||||||
|
|
||||||
|
let name = day_full_name[..space_index].to_string();
|
||||||
|
|
||||||
|
let date_slice = &day_full_name[space_index + 1..];
|
||||||
|
let date = NaiveDate::parse_from_str(date_slice, "%d.%m.%Y")
|
||||||
|
.map(|date| date.and_time(NaiveTime::default()).and_utc())
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
(name, date)
|
||||||
|
};
|
||||||
|
|
||||||
|
days.push(PartialDayMarkup {
|
||||||
|
row,
|
||||||
|
name: day_name,
|
||||||
|
date: day_date,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// fix unparsable day dates
|
||||||
|
let days_max = days.len().min(5);
|
||||||
|
|
||||||
|
for i in 0..days_max {
|
||||||
|
if days[i].date.is_none() && days[i + 1].date.is_some() {
|
||||||
|
days[i].date = Some(days[i + 1].date.unwrap() - Duration::days(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i in 0..days_max {
|
||||||
|
let i = days_max - i;
|
||||||
|
|
||||||
|
if days[i - 1].date.is_none() && days[i].date.is_some() {
|
||||||
|
days[i - 1].date = Some(days[i].date.unwrap() - Duration::days(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let days = days
|
||||||
|
.into_iter()
|
||||||
|
.map(|day| DayMarkup {
|
||||||
|
row: day.row,
|
||||||
|
column: 0,
|
||||||
|
name: day.name,
|
||||||
|
date: day.date.unwrap(),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(WorkSheetMarkup {
|
||||||
|
days,
|
||||||
|
groups: groups.into_boxed_slice(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The result of obtaining a lesson from the cell.
enum LessonParseResult {
    /// List of lessons, at most two entries long.
    ///
    /// The list holds one lesson when it is the first of its day; otherwise
    /// it holds a generated break lesson (covering the gap since the previous
    /// lesson) followed by the lesson itself. An empty vector means the cell
    /// was blank.
    Lessons(Vec<Lesson>),

    /// Street on which the Polytechnic Corps is located (the cell contained a
    /// street address instead of a lesson).
    Street(String),
}
|
||||||
|
|
||||||
|
// noinspection GrazieInspection
|
||||||
|
/// Obtaining a non-standard type of lesson by name.
|
||||||
|
fn guess_lesson_type(text: &str) -> Option<LessonType> {
|
||||||
|
static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
|
||||||
|
HashMap::from([
|
||||||
|
("о важном", LessonType::Additional),
|
||||||
|
("консультация", LessonType::Consultation),
|
||||||
|
("самостоятельная работа", LessonType::IndependentWork),
|
||||||
|
("зачет", LessonType::Exam),
|
||||||
|
("зачет с оценкой", LessonType::ExamWithGrade),
|
||||||
|
("экзамен", LessonType::ExamDefault),
|
||||||
|
("курсовой проект", LessonType::CourseProject),
|
||||||
|
("защита курсового проекта", LessonType::CourseProjectDefense),
|
||||||
|
("практическое занятие", LessonType::Practice),
|
||||||
|
("дифференцированный зачет", LessonType::DifferentiatedExam),
|
||||||
|
])
|
||||||
|
});
|
||||||
|
|
||||||
|
let name_lower = text.to_lowercase();
|
||||||
|
|
||||||
|
MAP.iter()
|
||||||
|
.map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &name_lower)))
|
||||||
|
.filter(|x| x.1 <= 4)
|
||||||
|
.min_by_key(|(_, score)| *score)
|
||||||
|
.map(|v| v.0.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Getting a pair (lesson) or a street marker from a schedule cell.
///
/// Returns:
/// * `Street(..)` when the cell text matches a street-address pattern;
/// * `Lessons(vec![])` when the cell is blank;
/// * `Lessons(vec![lesson])` when this is the day's first lesson;
/// * `Lessons(vec![break, lesson])` otherwise, where the break spans the gap
///   since the previous lesson of `day`.
///
/// # Errors
///
/// Returns [`Error::LessonTimeNotFound`] when no entry of `day_boundaries`
/// covers the lesson's cell rows.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &Day,
    day_boundaries: &[BoundariesData],
    lesson_boundaries: &BoundariesData,
    group_column: u32,
) -> Result<LessonParseResult> {
    let row = lesson_boundaries.range.start.row;

    let name = {
        // Blank cell: no lesson in this slot for this group.
        let cell_data = match worksheet.get_string_from_cell(row, group_column) {
            Some(x) => x,
            None => return Ok(Lessons(Vec::new())),
        };

        // Matches e.g. "Улица, д. 1" — a building address rather than a lesson.
        static OTHER_STREET_RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+[,\s]+д\.\s\d+$").unwrap());

        if OTHER_STREET_RE.is_match(&cell_data) {
            return Ok(Street(cell_data));
        }

        cell_data
    };

    // Merged range of the lesson cell; a lesson may span several time rows.
    let lesson_cell_range = worksheet.get_merge_from_start(row, group_column);

    let (default_range, lesson_time) = {
        // Boundaries whose cell ends at or after the lesson cell's end; the
        // first of them is the slot in which the lesson finishes.
        let end_time_arr = day_boundaries
            .iter()
            .filter(
                |BoundariesData {
                     range: CellRange { end, .. },
                     ..
                 }| { lesson_cell_range.end.row <= end.row },
            )
            .collect::<Vec<&BoundariesData>>();

        let end_time = end_time_arr
            .first()
            .ok_or(Error::LessonTimeNotFound(CellPos::new(row, group_column)))?;

        // [first index, last index] for numbered lessons; None for additional
        // slots. NOTE(review): assumes the ending slot is numbered whenever
        // the starting one is — `end_time.default_index.unwrap()` panics
        // otherwise; confirm against real documents.
        let range: Option<[u8; 2]> = if lesson_boundaries.default_index.is_some() {
            let default = lesson_boundaries.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };

        let time = LessonBoundaries {
            start: lesson_boundaries.time_range.start,
            end: end_time.time_range.end,
        };

        (range, time)
    };

    let ParsedLessonName {
        name,
        mut subgroups,
        r#type: lesson_type,
    } = parse_name_and_subgroups(&name, row, group_column)?;

    {
        // Cabinets sit in the column directly to the right of the lesson cell.
        let cabinets: Vec<String> = parse_cabinets(
            worksheet,
            (lesson_cell_range.start.row, lesson_cell_range.end.row),
            group_column + 1,
        );

        let cab_count = cabinets.len();

        if cab_count == 1 {
            // Assign this single cabinet to every parsed subgroup.
            let cab = Some(cabinets.first().unwrap().clone());

            for subgroup in subgroups.iter_mut().flatten() {
                subgroup.cabinet = cab.clone()
            }
        } else if cab_count == 2 {
            // Grow the subgroup list to match the cabinet count, duplicating
            // the last known subgroup (or `None`).
            while subgroups.len() < cab_count {
                subgroups.push(subgroups.last().unwrap_or(&None).clone());
            }

            for i in 0..cab_count {
                let subgroup = subgroups.get_mut(i).unwrap();
                let cabinet = Some(cabinets.get(i).unwrap().clone());

                match subgroup {
                    None => {
                        // No teacher was parsed for this slot: create a
                        // teacher-less subgroup carrying only the cabinet.
                        let _ = subgroup.insert(LessonSubGroup {
                            teacher: None,
                            cabinet,
                        });
                    }
                    Some(subgroup) => {
                        subgroup.cabinet = cabinet;
                    }
                }
            }
        }
    };

    let lesson = Lesson {
        // A type guessed from the cell text overrides the slot's default type.
        lesson_type: lesson_type.unwrap_or(lesson_boundaries.lesson_type.clone()),
        range: default_range,
        name: Some(name),
        time: lesson_time,
        // Two empty subgroups carry no information — collapse them to None.
        subgroups: if subgroups.len() == 2 && subgroups.iter().all(|x| x.is_none()) {
            None
        } else {
            Some(subgroups)
        },
        group: None,
    };

    // The first lesson of the day needs no preceding break.
    let prev_lesson = if day.lessons.is_empty() {
        return Ok(Lessons(Vec::from([lesson])));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };

    // Otherwise emit a synthetic break filling the gap before this lesson.
    Ok(Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            range: None,
            name: None,
            time: LessonBoundaries {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ])))
}
|
||||||
|
|
||||||
|
/// Obtaining a list of cabinets to the right of the lesson cell.
|
||||||
|
fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) -> Vec<String> {
|
||||||
|
let mut cabinets: Vec<String> = Vec::new();
|
||||||
|
|
||||||
|
for row in row_range.0..row_range.1 {
|
||||||
|
let raw = or_continue!(worksheet.get_string_from_cell(row, column));
|
||||||
|
|
||||||
|
let clean = raw.replace("\n", " ");
|
||||||
|
let parts: Vec<&str> = clean.split(" ").collect();
|
||||||
|
|
||||||
|
parts.iter().take(2).for_each(|part| {
|
||||||
|
let clean_part = part.to_string().trim().to_string();
|
||||||
|
|
||||||
|
cabinets.push(clean_part);
|
||||||
|
});
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
cabinets
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Result of splitting a lesson cell's text into its components.
struct ParsedLessonName {
    /// Lesson name with teacher and modifier parts stripped.
    name: String,
    /// Parsed subgroups; empty when no teacher was recognized in the text.
    subgroups: Vec<Option<LessonSubGroup>>,
    /// Non-standard lesson type guessed from the trailing modifier, if any.
    r#type: Option<LessonType>,
}
|
||||||
|
|
||||||
|
//noinspection GrazieInspection
/// Getting the "pure" name of the lesson and the list of teachers/subgroups
/// from the text of the lesson cell, plus an optional non-standard lesson
/// type guessed from the trailing "modifier" text.
///
/// `row` and `column` identify the source cell and are used only for error
/// reporting.
fn parse_name_and_subgroups(text: &str, row: u32, column: u32) -> Result<ParsedLessonName> {
    // Parts of a lesson cell:
    // 1. The lesson name itself.
    // 2. The list of teachers and subgroup numbers.
    // 3. A "modifier" (most of the time), e.g. an exam/consultation marker.
    //
    // Regex capturing a teacher's surname with initials and an optional
    // subgroup number (i.e. the second part).
    static NAME_RE: LazyLock<fancy_regex::Regex> = LazyLock::new(|| {
        fancy_regex::Regex::new(
            r"([А-Я][а-я]+(?:[\s.]*[А-Я]){1,2})(?=[^А-Яа-я])[.\s]*(?:\(?(\d)[\sа-я]*\)?)?",
        )
        .unwrap()
    });

    // Keep only Cyrillic letters, digits, whitespace, '.' and '-'.
    // NOTE(review): the trailing `replace(r"\s+", " ")` looks for the literal
    // text `\s+`, not a regex — whitespace runs are NOT collapsed here.
    let text = text
        .chars()
        .filter(|c: &char| {
            c.is_whitespace()
                || c.is_ascii_digit()
                || (*c >= 'а' && *c <= 'я')
                || (*c >= 'А' && *c <= 'Я')
                || *c == '.'
                || *c == '-'
        })
        .collect::<String>()
        .replace(r"\s+", " ");

    let mut lesson_name: Option<&str> = None;
    let mut extra: Option<&str> = None;

    // True while no explicit subgroup number has been seen.
    let mut shared_subgroup = true;
    let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];

    for capture in NAME_RE.captures_iter(&text).take(2) {
        let capture = capture.unwrap();

        // Everything before the first teacher match is the lesson name.
        if lesson_name.is_none() {
            lesson_name = Some(&text[..capture.get(0).unwrap().start()]);
        }

        // Everything after the last teacher match is the "modifier" text.
        extra = Some(&text[capture.get(0).unwrap().end()..]);

        // Normalize the teacher to "Surname X.Y." (or "Surname X." when only
        // one initial is present).
        let teacher_name = {
            let clean = capture
                .get(1)
                .unwrap()
                .as_str()
                .chars()
                .filter(|c| c.is_alphabetic())
                .collect::<Vec<char>>();

            if clean.get(clean.len() - 2).is_some_and(|c| c.is_uppercase()) {
                // Two trailing uppercase letters: both initials present.
                let (name, remaining) = clean.split_at(clean.len() - 2);
                format!(
                    "{} {}.{}.",
                    name.iter().collect::<String>(),
                    remaining[0],
                    remaining[1]
                )
            } else {
                // Single trailing initial.
                let (remaining, name) = clean.split_last().unwrap();
                format!("{} {}.", name.iter().collect::<String>(), remaining)
            }
        };

        let subgroup_index = capture.get(2).map(|m| m.as_str().parse::<u32>().unwrap());

        let subgroup = Some(LessonSubGroup {
            cabinet: None,
            teacher: Some(teacher_name),
        });

        match subgroup_index {
            None => {
                // we have only 2 matches max so more than 2 subgroups we cant have 100%
                *subgroups.iter_mut().find(|x| x.is_none()).unwrap() = subgroup;
            }
            Some(num) => {
                // bc we have indexed subgroup
                shared_subgroup = false;

                // 1 - 1 = 0 | 2 - 1 = 1 | 3 - 1 = 2 (schedule index to array index)
                // 0 % 2 = 0 | 1 % 2 = 1 | 2 % 2 = 0 (clamp)
                let subgroup_index = ((num - 1) % 2) as usize;

                // if we have subgroup in that index (probably non-indexed, we change it index to free)
                if subgroups[subgroup_index].is_some() {
                    subgroups.swap(0, 1);
                }

                subgroups[subgroup_index] = subgroup;
            }
        }
    }

    let subgroups = if lesson_name.is_none() {
        // No teacher matched at all: no subgroup information.
        Vec::new()
    } else if shared_subgroup {
        // One un-numbered teacher teaches the whole group.
        Vec::from([subgroups.into_iter().next().unwrap()])
    } else {
        Vec::from(subgroups)
    };

    // Without a teacher match, fall back to the last two space-separated
    // words of the text as the candidate "modifier".
    if extra.is_none() {
        extra = text
            .rfind(" ")
            .and_then(|i| text[..i].rfind(" "))
            .map(|i| &text[i + 1..]);
    }

    let lesson_type = if let Some(extra) = extra
        && extra.len() > 4
    {
        let result = guess_lesson_type(extra);

        // Report unrecognized modifiers: to Sentry in release builds, to the
        // log in debug builds.
        if result.is_none() {
            #[cfg(not(debug_assertions))]
            sentry::capture_error(&Error::UnknownLessonType {
                r#type: extra.to_string(),
                pos: CellPos::new(row, column),
            });

            #[cfg(debug_assertions)]
            log::warn!(
                "{}",
                Error::UnknownLessonType {
                    r#type: extra.to_string(),
                    pos: CellPos::new(row, column),
                }
            );
        }

        result
    } else {
        None
    };

    Ok(ParsedLessonName {
        name: lesson_name.unwrap_or(&text).to_string(),
        subgroups,
        r#type: lesson_type,
    })
}
|
||||||
|
|
||||||
|
/// Getting the start and end of a pair from a cell in the first column of a document.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `cell_data`: text in cell.
|
||||||
|
/// * `date`: date of the current day.
|
||||||
|
fn parse_lesson_boundaries_cell(cell_data: &str, date: DateTime<Utc>) -> Option<LessonBoundaries> {
|
||||||
|
static TIME_RE: LazyLock<Regex> =
|
||||||
|
LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
|
||||||
|
|
||||||
|
let parse_res = TIME_RE.captures(cell_data)?;
|
||||||
|
|
||||||
|
let start_match = parse_res.get(1).unwrap().as_str();
|
||||||
|
let start_parts: Vec<&str> = start_match.split(".").collect();
|
||||||
|
|
||||||
|
let end_match = parse_res.get(2).unwrap().as_str();
|
||||||
|
let end_parts: Vec<&str> = end_match.split(".").collect();
|
||||||
|
|
||||||
|
static GET_TIME: fn(DateTime<Utc>, &Vec<&str>) -> DateTime<Utc> = |date, parts| {
|
||||||
|
date + Duration::hours(parts[0].parse::<i64>().unwrap() - 4)
|
||||||
|
+ Duration::minutes(parts[1].parse::<i64>().unwrap())
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(LessonBoundaries {
|
||||||
|
start: GET_TIME(date, &start_parts),
|
||||||
|
end: GET_TIME(date, &end_parts),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse the column of the document to obtain a list of day's lesson boundaries.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `worksheet`: document.
|
||||||
|
/// * `date`: date of the current day.
|
||||||
|
/// * `row_range`: row boundaries of the current day.
|
||||||
|
/// * `column`: column with the required data.
|
||||||
|
fn parse_day_boundaries(
|
||||||
|
worksheet: &WorkSheet,
|
||||||
|
date: DateTime<Utc>,
|
||||||
|
row_range: (u32, u32),
|
||||||
|
column: u32,
|
||||||
|
) -> Result<Vec<BoundariesData>> {
|
||||||
|
let mut day_times: Vec<BoundariesData> = Vec::new();
|
||||||
|
|
||||||
|
for row in row_range.0..row_range.1 {
|
||||||
|
let time_cell = if let Some(str) = worksheet.get_string_from_cell(row, column) {
|
||||||
|
str
|
||||||
|
} else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
let lesson_time = parse_lesson_boundaries_cell(&time_cell, date)
|
||||||
|
.ok_or(Error::NoLessonBoundaries(CellPos::new(row, column)))?;
|
||||||
|
|
||||||
|
// type
|
||||||
|
let lesson_type = if time_cell.contains("пара") {
|
||||||
|
LessonType::Default
|
||||||
|
} else {
|
||||||
|
LessonType::Additional
|
||||||
|
};
|
||||||
|
|
||||||
|
// lesson index
|
||||||
|
let default_index = if lesson_type == LessonType::Default {
|
||||||
|
Some(
|
||||||
|
time_cell
|
||||||
|
.chars()
|
||||||
|
.next()
|
||||||
|
.unwrap()
|
||||||
|
.to_string()
|
||||||
|
.parse::<u32>()
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
day_times.push(BoundariesData {
|
||||||
|
time_range: lesson_time,
|
||||||
|
lesson_type,
|
||||||
|
default_index,
|
||||||
|
range: worksheet.get_merge_from_start(row, column),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(day_times)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse the column of the document to obtain a list of week's lesson boundaries.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `worksheet`: document.
|
||||||
|
/// * `week_markup`: markup of the current week.
|
||||||
|
fn parse_week_boundaries(
|
||||||
|
worksheet: &WorkSheet,
|
||||||
|
week_markup: &[DayMarkup],
|
||||||
|
) -> Result<Vec<Vec<BoundariesData>>> {
|
||||||
|
let mut result: Vec<Vec<BoundariesData>> = Vec::new();
|
||||||
|
|
||||||
|
let worksheet_end_row = worksheet.end().unwrap().0;
|
||||||
|
let lesson_time_column = week_markup[0].column + 1;
|
||||||
|
|
||||||
|
for day_index in 0..week_markup.len() {
|
||||||
|
let day_markup = &week_markup[day_index];
|
||||||
|
|
||||||
|
// Если текущий день не последнему, то индекс строки следующего дня.
|
||||||
|
// Если текущий день - последний, то индекс последней строки документа.
|
||||||
|
let end_row = if day_index != week_markup.len() - 1 {
|
||||||
|
week_markup[day_index + 1].row
|
||||||
|
} else {
|
||||||
|
worksheet_end_row
|
||||||
|
};
|
||||||
|
|
||||||
|
let day_boundaries = parse_day_boundaries(
|
||||||
|
worksheet,
|
||||||
|
day_markup.date,
|
||||||
|
(day_markup.row, end_row),
|
||||||
|
lesson_time_column,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
result.push(day_boundaries);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Conversion of the list of couples of groups in the list of lessons of teachers.
|
||||||
|
fn convert_groups_to_teachers(
|
||||||
|
groups: &HashMap<String, ScheduleEntry>,
|
||||||
|
) -> HashMap<String, ScheduleEntry> {
|
||||||
|
let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();
|
||||||
|
|
||||||
|
let empty_days: Vec<Day> = groups
|
||||||
|
.values()
|
||||||
|
.next()
|
||||||
|
.unwrap()
|
||||||
|
.days
|
||||||
|
.iter()
|
||||||
|
.map(|day| Day {
|
||||||
|
name: day.name.clone(),
|
||||||
|
street: day.street.clone(),
|
||||||
|
date: day.date,
|
||||||
|
lessons: vec![],
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
for group in groups.values() {
|
||||||
|
for (index, day) in group.days.iter().enumerate() {
|
||||||
|
for group_lesson in &day.lessons {
|
||||||
|
if group_lesson.lesson_type == Break {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if group_lesson.subgroups.is_none() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let subgroups = group_lesson.subgroups.as_ref().unwrap();
|
||||||
|
|
||||||
|
for subgroup in subgroups {
|
||||||
|
let teacher = match subgroup {
|
||||||
|
None => continue,
|
||||||
|
Some(subgroup) => match &subgroup.teacher {
|
||||||
|
None => continue,
|
||||||
|
Some(teacher) => teacher,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
if teacher == "Ошибка в расписании" {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if !teachers.contains_key(teacher) {
|
||||||
|
teachers.insert(
|
||||||
|
teacher.clone(),
|
||||||
|
ScheduleEntry {
|
||||||
|
name: teacher.clone(),
|
||||||
|
days: empty_days.to_vec(),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let teacher_day = teachers
|
||||||
|
.get_mut(teacher)
|
||||||
|
.unwrap()
|
||||||
|
.days
|
||||||
|
.get_mut(index)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
teacher_day.lessons.push({
|
||||||
|
let mut lesson = group_lesson.clone();
|
||||||
|
lesson.group = Some(group.name.clone());
|
||||||
|
|
||||||
|
lesson
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
teachers.iter_mut().for_each(|(_, teacher)| {
|
||||||
|
teacher.days.iter_mut().for_each(|day| {
|
||||||
|
day.lessons
|
||||||
|
.sort_by(|a, b| a.range.as_ref().unwrap()[1].cmp(&b.range.as_ref().unwrap()[1]))
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
teachers
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reading XLS Document from the buffer and converting it into the schedule ready to use.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `buffer`: XLS data containing schedule.
|
||||||
|
///
|
||||||
|
/// returns: Result<ParseResult, Error>
|
||||||
|
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule> {
|
||||||
|
let cursor = Cursor::new(&buffer);
|
||||||
|
let mut workbook: Xls<_> = open_workbook_from_rs(cursor)?;
|
||||||
|
|
||||||
|
let worksheet = {
|
||||||
|
let (worksheet_name, worksheet) = workbook
|
||||||
|
.worksheets()
|
||||||
|
.first()
|
||||||
|
.ok_or(Error::NoWorkSheets)?
|
||||||
|
.clone();
|
||||||
|
|
||||||
|
let worksheet_merges = workbook
|
||||||
|
.worksheet_merge_cells(&worksheet_name)
|
||||||
|
.ok_or(Error::NoWorkSheets)?;
|
||||||
|
|
||||||
|
WorkSheet {
|
||||||
|
data: worksheet,
|
||||||
|
merges: worksheet_merges,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let WorkSheetMarkup {
|
||||||
|
days: week_markup,
|
||||||
|
groups: groups_markup,
|
||||||
|
} = parse_markup(&worksheet)?;
|
||||||
|
|
||||||
|
let week_boundaries = parse_week_boundaries(&worksheet, &week_markup)?;
|
||||||
|
|
||||||
|
let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
|
||||||
|
|
||||||
|
for group_markup in groups_markup {
|
||||||
|
let mut group = ScheduleEntry {
|
||||||
|
name: group_markup.name,
|
||||||
|
days: Vec::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
for day_index in 0..week_markup.len() {
|
||||||
|
let day_markup = &week_markup[day_index];
|
||||||
|
|
||||||
|
let mut day = Day {
|
||||||
|
name: day_markup.name.clone(),
|
||||||
|
street: None,
|
||||||
|
date: day_markup.date,
|
||||||
|
lessons: Vec::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let day_boundaries = &week_boundaries[day_index];
|
||||||
|
|
||||||
|
for lesson_boundaries in day_boundaries {
|
||||||
|
match &mut parse_lesson(
|
||||||
|
&worksheet,
|
||||||
|
&day,
|
||||||
|
day_boundaries,
|
||||||
|
lesson_boundaries,
|
||||||
|
group_markup.column,
|
||||||
|
)? {
|
||||||
|
Lessons(lesson) => day.lessons.append(lesson),
|
||||||
|
Street(street) => day.street = Some(street.to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
group.days.push(day);
|
||||||
|
}
|
||||||
|
|
||||||
|
groups.insert(group.name.clone(), group);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ParsedSchedule {
|
||||||
|
teachers: convert_groups_to_teachers(&groups),
|
||||||
|
groups,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(test, feature = "test"))]
|
||||||
|
pub mod test_utils {
|
||||||
|
use super::*;
|
||||||
|
use base::ParsedSchedule;
|
||||||
|
|
||||||
|
pub fn test_result() -> Result<ParsedSchedule> {
|
||||||
|
parse_xls(&include_bytes!("../../../../test-data/engels-polytechnic.xls").to_vec())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
pub mod tests {
    /// Smoke test: the bundled document parses and yields non-empty group and
    /// teacher schedules.
    #[test]
    fn read() {
        let result = super::test_utils::test_result();

        assert!(result.is_ok(), "{}", result.err().unwrap());

        assert_ne!(result.as_ref().unwrap().groups.len(), 0);
        assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
    }

    /// A lesson split between two subgroups must get one cabinet per subgroup
    /// (group "ИС-214/23", Thursday, in the bundled document).
    #[test]
    fn test_split_lesson() {
        let result = super::test_utils::test_result();
        assert!(result.is_ok(), "{}", result.err().unwrap());

        let result = result.unwrap();
        assert!(result.groups.contains_key("ИС-214/23"));

        let group = result.groups.get("ИС-214/23").unwrap();

        // Day index 3 = fourth day of the week (Thursday).
        let thursday = group.days.get(3).unwrap();
        assert_eq!(thursday.lessons.len(), 1);

        let lesson = &thursday.lessons[0];
        assert_eq!(lesson.range.unwrap()[1], 3);
        assert!(lesson.subgroups.is_some());

        let subgroups = lesson.subgroups.as_ref().unwrap();
        assert_eq!(subgroups.len(), 2);

        assert_eq!(
            subgroups[0].as_ref().unwrap().cabinet,
            Some("44".to_string())
        );

        assert_eq!(
            subgroups[1].as_ref().unwrap().cabinet,
            Some("43".to_string())
        );
    }
}
|
||||||
105
providers/provider-engels-polytechnic/src/parser/worksheet.rs
Normal file
105
providers/provider-engels-polytechnic/src/parser/worksheet.rs
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
use regex::Regex;
|
||||||
|
use std::fmt::{Display, Formatter};
|
||||||
|
use std::ops::Deref;
|
||||||
|
use std::sync::LazyLock;
|
||||||
|
|
||||||
|
/// XLS WorkSheet data.
pub struct WorkSheet {
    /// Cell grid of the worksheet.
    pub data: calamine::Range<calamine::Data>,
    /// Merged-cell regions of the worksheet.
    pub merges: Vec<calamine::Dimensions>,
}
|
||||||
|
|
||||||
|
/// Zero-based (row, column) position of a single worksheet cell.
///
/// Derives `derive_more::Error` so it can be embedded in error variants.
#[derive(Clone, Debug, derive_more::Error)]
pub struct CellPos {
    /// Zero-based row index.
    pub row: u32,
    /// Zero-based column index.
    pub column: u32,
}
|
||||||
|
|
||||||
|
/// Converts a zero-based column index to an Excel-style letter label
/// (0 -> "A", 25 -> "Z", 26 -> "AA", 701 -> "ZZ", 702 -> "AAA").
// https://stackoverflow.com/a/297214
fn format_column_index(index: u32) -> String {
    let mut letters: Vec<char> = Vec::new();
    let mut n = index;

    loop {
        letters.push(char::from(b'A' + (n % 26) as u8));
        if n < 26 {
            break;
        }
        // Move to the next "digit"; -1 because the numbering is bijective base-26.
        n = n / 26 - 1;
    }

    letters.iter().rev().collect()
}
|
||||||
|
|
||||||
|
impl Display for CellPos {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.write_fmt(format_args!(
|
||||||
|
"column {}, row {}",
|
||||||
|
format_column_index(self.column),
|
||||||
|
self.row + 1,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Rectangular span of worksheet cells.
pub struct CellRange {
    /// Top-left cell (inclusive).
    pub start: CellPos,
    /// Bottom-right bound (exclusive — one past the last row/column, as built
    /// by `WorkSheet::get_merge_from_start`).
    pub end: CellPos,
}
|
||||||
|
|
||||||
|
// NOTE(review): `Deref` to the underlying `calamine::Range` lets callers use
// range methods (`get`, `start`, `end`) directly on `WorkSheet`. Deref on a
// non-pointer type is unconventional, but existing callers rely on it.
impl Deref for WorkSheet {
    type Target = calamine::Range<calamine::Data>;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
|
||||||
|
|
||||||
|
impl WorkSheet {
|
||||||
|
/// Getting a line from the required cell.
|
||||||
|
pub fn get_string_from_cell(&self, row: u32, col: u32) -> Option<String> {
|
||||||
|
let cell_data = if let Some(data) = self.get((row as usize, col as usize)) {
|
||||||
|
data.to_string()
|
||||||
|
} else {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
if cell_data.trim().is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
static NL_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\n\r]+").unwrap());
|
||||||
|
static SP_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
|
||||||
|
|
||||||
|
let trimmed_data = SP_RE
|
||||||
|
.replace_all(&NL_RE.replace_all(&cell_data, " "), " ")
|
||||||
|
.trim()
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
if trimmed_data.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(trimmed_data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Obtaining the boundaries of the cell along its upper left coordinate.
|
||||||
|
pub fn get_merge_from_start(&self, row: u32, column: u32) -> CellRange {
|
||||||
|
match self
|
||||||
|
.merges
|
||||||
|
.iter()
|
||||||
|
.find(|merge| merge.start.0 == row && merge.start.1 == column)
|
||||||
|
{
|
||||||
|
Some(merge) => CellRange {
|
||||||
|
start: CellPos::new(merge.start.0, merge.start.1),
|
||||||
|
end: CellPos::new(merge.end.0 + 1, merge.end.1 + 1),
|
||||||
|
},
|
||||||
|
None => CellRange {
|
||||||
|
start: CellPos::new(row, column),
|
||||||
|
end: CellPos::new(row + 1, column + 1),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CellPos {
    /// Creates a position from zero-based row and column indices.
    pub fn new(row: u32, column: u32) -> Self {
        Self { row, column }
    }
}
|
||||||
33
providers/provider-engels-polytechnic/src/updater/error.rs
Normal file
33
providers/provider-engels-polytechnic/src/updater/error.rs
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
use crate::xls_downloader::FetchError;
|
||||||
|
use derive_more::{Display, Error, From};
|
||||||
|
|
||||||
|
#[derive(Debug, Display, Error, From)]
pub enum Error {
    /// Occurs when the request to the Yandex Cloud API fails.
    ///
    /// This may be due to network issues, invalid API key, incorrect function ID, or other
    /// problems with the Yandex Cloud Function invocation.
    #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
    Reqwest(reqwest::Error),

    /// The document URI could not be obtained after three retries.
    #[display("Unable to get URI in 3 retries")]
    EmptyUri,

    /// The ETag is the same (no update needed).
    #[display("The ETag is the same.")]
    SameETag,

    /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
    #[display("Failed to fetch URL: {_0}")]
    ScheduleFetchFailed(FetchError),

    /// Downloading the XLS file content failed after successfully obtaining the URL.
    #[display("Download failed: {_0}")]
    ScheduleDownloadFailed(FetchError),

    /// The XLS file could not be parsed into a valid schedule format.
    #[from]
    InvalidSchedule(crate::parser::Error),
}

/// Convenience alias binding this module's [`Error`] as the error type.
pub type Result<T> = core::result::Result<T, Error>;
|
||||||
225
providers/provider-engels-polytechnic/src/updater/mod.rs
Normal file
225
providers/provider-engels-polytechnic/src/updater/mod.rs
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
pub use self::error::{Error, Result};
|
||||||
|
use crate::parser::parse_xls;
|
||||||
|
use crate::xls_downloader::{FetchError, XlsDownloader};
|
||||||
|
use base::ScheduleSnapshot;
|
||||||
|
mod error;
|
||||||
|
|
||||||
|
pub enum UpdateSource {
|
||||||
|
Prepared(ScheduleSnapshot),
|
||||||
|
|
||||||
|
Url(String),
|
||||||
|
|
||||||
|
GrabFromSite {
|
||||||
|
yandex_api_key: String,
|
||||||
|
yandex_func_id: String,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Updater {
|
||||||
|
downloader: XlsDownloader,
|
||||||
|
update_source: UpdateSource,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Updater {
|
||||||
|
/// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
|
||||||
|
///
|
||||||
|
/// This method first checks if the provided URL is the same as the one already configured in the downloader.
|
||||||
|
/// If different, it updates the downloader's URL, fetches the XLS content, parses it, and creates a snapshot.
|
||||||
|
/// Errors are returned for URL conflicts, network issues, download failures, or invalid data.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `downloader`: A mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule data.
|
||||||
|
/// * `url`: The source URL pointing to the XLS file containing schedule data.
|
||||||
|
///
|
||||||
|
/// returns: Result<ScheduleSnapshot, SnapshotCreationError>
|
||||||
|
async fn new_snapshot(downloader: &mut XlsDownloader, url: String) -> Result<ScheduleSnapshot> {
|
||||||
|
let head_result = downloader.set_url(&url).await.map_err(|error| {
|
||||||
|
if let FetchError::Reqwest(error) = &error {
|
||||||
|
sentry::capture_error(&error);
|
||||||
|
}
|
||||||
|
|
||||||
|
Error::ScheduleFetchFailed(error)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
if downloader.etag == Some(head_result.etag) {
|
||||||
|
return Err(Error::SameETag);
|
||||||
|
}
|
||||||
|
|
||||||
|
let xls_data = downloader
|
||||||
|
.fetch(false)
|
||||||
|
.await
|
||||||
|
.map_err(|error| {
|
||||||
|
if let FetchError::Reqwest(error) = &error {
|
||||||
|
sentry::capture_error(&error);
|
||||||
|
}
|
||||||
|
|
||||||
|
Error::ScheduleDownloadFailed(error)
|
||||||
|
})?
|
||||||
|
.data
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let parse_result = parse_xls(&xls_data)?;
|
||||||
|
|
||||||
|
Ok(ScheduleSnapshot {
|
||||||
|
fetched_at: head_result.requested_at,
|
||||||
|
updated_at: head_result.uploaded_at,
|
||||||
|
url,
|
||||||
|
data: parse_result,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
|
||||||
|
///
|
||||||
|
/// This sends a POST request to the specified Yandex Cloud Function endpoint,
|
||||||
|
/// using the provided API key for authentication. The returned URI is combined
|
||||||
|
/// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `api_key` - Authentication token for Yandex Cloud API
|
||||||
|
/// * `func_id` - ID of the target Yandex Cloud Function to invoke
|
||||||
|
///
|
||||||
|
/// # Returns
|
||||||
|
///
|
||||||
|
/// Result containing:
|
||||||
|
/// - `Ok(String)` - Complete URL constructed from the Function's response
|
||||||
|
/// - `Err(QueryUrlError)` - If the request or response processing fails
|
||||||
|
async fn query_url(api_key: &str, func_id: &str) -> Result<String> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let uri = {
|
||||||
|
// вот бы добавили named-scopes как в котлине,
|
||||||
|
// чтоб мне не пришлось такой хуйнёй страдать.
|
||||||
|
#[allow(unused_assignments)]
|
||||||
|
let mut uri = String::new();
|
||||||
|
let mut counter = 0;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if counter == 3 {
|
||||||
|
return Err(Error::EmptyUri);
|
||||||
|
}
|
||||||
|
|
||||||
|
counter += 1;
|
||||||
|
|
||||||
|
uri = client
|
||||||
|
.post(format!(
|
||||||
|
"https://functions.yandexcloud.net/{}?integration=raw",
|
||||||
|
func_id
|
||||||
|
))
|
||||||
|
.header("Authorization", format!("Api-Key {}", api_key))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(Error::Reqwest)?
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.map_err(Error::Reqwest)?;
|
||||||
|
|
||||||
|
if uri.is_empty() {
|
||||||
|
log::warn!("[{}] Unable to get uri! Retrying in 5 seconds...", counter);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
uri
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Initializes the schedule by fetching the URL from the environment or Yandex Cloud Function (FaaS)
|
||||||
|
/// and creating a [`ScheduleSnapshot`] with the downloaded data.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `downloader`: Mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule
|
||||||
|
/// * `app_env`: Reference to the application environment containing either a predefined URL or Yandex Cloud credentials
|
||||||
|
///
|
||||||
|
/// # Returns
|
||||||
|
///
|
||||||
|
/// Returns `Ok(())` if the snapshot was successfully initialized, or an `Error` if:
|
||||||
|
/// - URL query to Yandex Cloud failed ([`QueryUrlError`])
|
||||||
|
/// - Schedule snapshot creation failed ([`SnapshotCreationError`])
|
||||||
|
pub async fn new(update_source: UpdateSource) -> Result<(Self, ScheduleSnapshot)> {
|
||||||
|
let mut this = Updater {
|
||||||
|
downloader: XlsDownloader::new(),
|
||||||
|
update_source,
|
||||||
|
};
|
||||||
|
|
||||||
|
if let UpdateSource::Prepared(snapshot) = &this.update_source {
|
||||||
|
let snapshot = snapshot.clone();
|
||||||
|
return Ok((this, snapshot));
|
||||||
|
}
|
||||||
|
|
||||||
|
let url = match &this.update_source {
|
||||||
|
UpdateSource::Url(url) => {
|
||||||
|
log::info!("The default link {} will be used", url);
|
||||||
|
url.clone()
|
||||||
|
}
|
||||||
|
UpdateSource::GrabFromSite {
|
||||||
|
yandex_api_key,
|
||||||
|
yandex_func_id,
|
||||||
|
} => {
|
||||||
|
log::info!("Obtaining a link using FaaS...");
|
||||||
|
Self::query_url(yandex_api_key, yandex_func_id).await?
|
||||||
|
}
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
|
||||||
|
log::info!("For the initial setup, a link {} will be used", url);
|
||||||
|
|
||||||
|
let snapshot = Self::new_snapshot(&mut this.downloader, url).await?;
|
||||||
|
log::info!("Schedule snapshot successfully created!");
|
||||||
|
|
||||||
|
Ok((this, snapshot))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Updates the schedule snapshot by querying the latest URL from FaaS and checking for changes.
|
||||||
|
/// If the URL hasn't changed, only updates the [`fetched_at`] timestamp. If changed, downloads
|
||||||
|
/// and parses the new schedule data.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `downloader`: XLS file downloader used to fetch and parse the schedule data
|
||||||
|
/// * `app_env`: Application environment containing Yandex Cloud configuration and auto-update settings
|
||||||
|
///
|
||||||
|
/// returns: `Result<(), Error>` - Returns error if URL query fails or schedule parsing encounters issues
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// Use `unsafe` to access the initialized snapshot, guaranteed valid by prior `init()` call
|
||||||
|
pub async fn update(
|
||||||
|
&mut self,
|
||||||
|
current_snapshot: &ScheduleSnapshot,
|
||||||
|
) -> Result<ScheduleSnapshot> {
|
||||||
|
if let UpdateSource::Prepared(snapshot) = &self.update_source {
|
||||||
|
let mut snapshot = snapshot.clone();
|
||||||
|
snapshot.update();
|
||||||
|
return Ok(snapshot);
|
||||||
|
}
|
||||||
|
|
||||||
|
let url = match &self.update_source {
|
||||||
|
UpdateSource::Url(url) => url.clone(),
|
||||||
|
UpdateSource::GrabFromSite {
|
||||||
|
yandex_api_key,
|
||||||
|
yandex_func_id,
|
||||||
|
} => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str()).await?,
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let snapshot = match Self::new_snapshot(&mut self.downloader, url).await {
|
||||||
|
Ok(snapshot) => snapshot,
|
||||||
|
Err(Error::SameETag) => {
|
||||||
|
let mut clone = current_snapshot.clone();
|
||||||
|
clone.update();
|
||||||
|
|
||||||
|
clone
|
||||||
|
}
|
||||||
|
Err(error) => return Err(error),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(snapshot)
|
||||||
|
}
|
||||||
|
}
|
||||||
253
providers/provider-engels-polytechnic/src/xls_downloader.rs
Normal file
253
providers/provider-engels-polytechnic/src/xls_downloader.rs
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use derive_more::{Display, Error};
|
||||||
|
use std::mem::discriminant;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
/// XLS data retrieval errors.
|
||||||
|
#[derive(Clone, Debug, ToSchema, Display, Error)]
|
||||||
|
pub enum FetchError {
|
||||||
|
/// File url is not set.
|
||||||
|
#[display("The link to the timetable was not provided earlier.")]
|
||||||
|
NoUrlProvided,
|
||||||
|
|
||||||
|
/// Unknown error.
|
||||||
|
#[display("An unknown error occurred while downloading the file.")]
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
Reqwest(Arc<reqwest::Error>),
|
||||||
|
|
||||||
|
/// Server returned a status code different from 200.
|
||||||
|
#[display("Server returned a status code {status_code}.")]
|
||||||
|
BadStatusCode { status_code: u16 },
|
||||||
|
|
||||||
|
/// The url leads to a file of a different type.
|
||||||
|
#[display("The link leads to a file of type '{content_type}'.")]
|
||||||
|
BadContentType { content_type: String },
|
||||||
|
|
||||||
|
/// Server doesn't return expected headers.
|
||||||
|
#[display("Server doesn't return expected header(s) '{expected_header}'.")]
|
||||||
|
BadHeaders { expected_header: String },
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FetchError {
|
||||||
|
pub fn unknown(error: Arc<reqwest::Error>) -> Self {
|
||||||
|
Self::Reqwest(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bad_status_code(status_code: u16) -> Self {
|
||||||
|
Self::BadStatusCode { status_code }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bad_content_type(content_type: &str) -> Self {
|
||||||
|
Self::BadContentType {
|
||||||
|
content_type: content_type.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bad_headers(expected_header: &str) -> Self {
|
||||||
|
Self::BadHeaders {
|
||||||
|
expected_header: expected_header.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for FetchError {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
discriminant(self) == discriminant(other)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Result of XLS data retrieval.
|
||||||
|
#[derive(Debug, PartialEq)]
|
||||||
|
pub struct FetchOk {
|
||||||
|
/// File upload date.
|
||||||
|
pub uploaded_at: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// Date data received.
|
||||||
|
pub requested_at: DateTime<Utc>,
|
||||||
|
|
||||||
|
/// Etag.
|
||||||
|
pub etag: String,
|
||||||
|
|
||||||
|
/// File data.
|
||||||
|
pub data: Option<Vec<u8>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FetchOk {
|
||||||
|
/// Result without file content.
|
||||||
|
pub fn head(uploaded_at: DateTime<Utc>, etag: String) -> Self {
|
||||||
|
FetchOk {
|
||||||
|
uploaded_at,
|
||||||
|
requested_at: Utc::now(),
|
||||||
|
etag,
|
||||||
|
data: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Full result.
|
||||||
|
pub fn get(uploaded_at: DateTime<Utc>, etag: String, data: Vec<u8>) -> Self {
|
||||||
|
FetchOk {
|
||||||
|
uploaded_at,
|
||||||
|
requested_at: Utc::now(),
|
||||||
|
etag,
|
||||||
|
data: Some(data),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type FetchResult = Result<FetchOk, FetchError>;
|
||||||
|
|
||||||
|
pub struct XlsDownloader {
|
||||||
|
pub url: Option<String>,
|
||||||
|
pub etag: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl XlsDownloader {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
XlsDownloader {
|
||||||
|
url: None,
|
||||||
|
etag: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch_specified(url: &str, head: bool) -> FetchResult {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let response = if head {
|
||||||
|
client.head(url)
|
||||||
|
} else {
|
||||||
|
client.get(url)
|
||||||
|
}
|
||||||
|
.header("User-Agent", ua_generator::ua::spoof_chrome_ua())
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| FetchError::unknown(Arc::new(e)))?;
|
||||||
|
|
||||||
|
if response.status().as_u16() != 200 {
|
||||||
|
return Err(FetchError::bad_status_code(response.status().as_u16()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let headers = response.headers();
|
||||||
|
|
||||||
|
let content_type = headers
|
||||||
|
.get("Content-Type")
|
||||||
|
.ok_or(FetchError::bad_headers("Content-Type"))?;
|
||||||
|
|
||||||
|
let etag = headers
|
||||||
|
.get("etag")
|
||||||
|
.ok_or(FetchError::bad_headers("etag"))?
|
||||||
|
.to_str()
|
||||||
|
.or(Err(FetchError::bad_headers("etag")))?
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let last_modified = headers
|
||||||
|
.get("last-modified")
|
||||||
|
.ok_or(FetchError::bad_headers("last-modified"))?;
|
||||||
|
|
||||||
|
if content_type != "application/vnd.ms-excel" {
|
||||||
|
return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let last_modified = DateTime::parse_from_rfc2822(last_modified.to_str().unwrap())
|
||||||
|
.unwrap()
|
||||||
|
.with_timezone(&Utc);
|
||||||
|
|
||||||
|
Ok(if head {
|
||||||
|
FetchOk::head(last_modified, etag)
|
||||||
|
} else {
|
||||||
|
FetchOk::get(
|
||||||
|
last_modified,
|
||||||
|
etag,
|
||||||
|
response.bytes().await.unwrap().to_vec(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn fetch(&self, head: bool) -> FetchResult {
|
||||||
|
if self.url.is_none() {
|
||||||
|
Err(FetchError::NoUrlProvided)
|
||||||
|
} else {
|
||||||
|
Self::fetch_specified(self.url.as_ref().unwrap(), head).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn set_url(&mut self, url: &str) -> FetchResult {
|
||||||
|
let result = Self::fetch_specified(url, true).await;
|
||||||
|
|
||||||
|
if result.is_ok() {
|
||||||
|
self.url = Some(url.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::xls_downloader::{FetchError, XlsDownloader};
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn bad_url() {
|
||||||
|
let url = "bad_url";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert!(downloader.set_url(url).await.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn bad_status_code() {
|
||||||
|
let url = "https://www.google.com/not-found";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert_eq!(
|
||||||
|
downloader.set_url(url).await,
|
||||||
|
Err(FetchError::bad_status_code(404))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn bad_headers() {
|
||||||
|
let url = "https://www.google.com/favicon.ico";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert_eq!(
|
||||||
|
downloader.set_url(url).await,
|
||||||
|
Err(FetchError::BadHeaders {
|
||||||
|
expected_header: "ETag".to_string(),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn bad_content_type() {
|
||||||
|
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert!(downloader.set_url(url).await.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn ok() {
|
||||||
|
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert!(downloader.set_url(url).await.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn downloader_ok() {
|
||||||
|
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
|
||||||
|
|
||||||
|
let mut downloader = XlsDownloader::new();
|
||||||
|
assert!(downloader.set_url(url).await.is_ok());
|
||||||
|
assert!(downloader.fetch(false).await.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn downloader_no_url_provided() {
|
||||||
|
let downloader = XlsDownloader::new();
|
||||||
|
|
||||||
|
let result = downloader.fetch(false).await;
|
||||||
|
assert_eq!(result, Err(FetchError::NoUrlProvided));
|
||||||
|
}
|
||||||
|
}
|
||||||
9
providers/src/lib.rs
Normal file
9
providers/src/lib.rs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
pub use base;
|
||||||
|
|
||||||
|
pub use provider_engels_polytechnic::EngelsPolytechnicProvider;
|
||||||
|
pub use provider_engels_polytechnic::UpdateSource as EngelsPolytechnicUpdateSource;
|
||||||
|
|
||||||
|
#[cfg(feature = "test")]
|
||||||
|
pub mod test_utils {
|
||||||
|
pub use provider_engels_polytechnic::test_utils as engels_polytechnic;
|
||||||
|
}
|
||||||
BIN
schedule.xls
BIN
schedule.xls
Binary file not shown.
@@ -1,88 +0,0 @@
|
|||||||
use crate::parser::schema::ParseResult;
|
|
||||||
use crate::utility::hasher::DigestHasher;
|
|
||||||
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
|
|
||||||
use actix_web::web;
|
|
||||||
use chrono::{DateTime, Utc};
|
|
||||||
use diesel::{Connection, PgConnection};
|
|
||||||
use firebase_messaging_rs::FCMClient;
|
|
||||||
use sha1::{Digest, Sha1};
|
|
||||||
use std::env;
|
|
||||||
use std::hash::Hash;
|
|
||||||
use std::sync::Mutex;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Schedule {
|
|
||||||
pub etag: String,
|
|
||||||
pub fetched_at: DateTime<Utc>,
|
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
pub parsed_at: DateTime<Utc>,
|
|
||||||
pub data: ParseResult,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct VkId {
|
|
||||||
pub client_id: i32,
|
|
||||||
pub redirect_url: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl VkId {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
client_id: env::var("VKID_CLIENT_ID")
|
|
||||||
.expect("VKID_CLIENT_ID must be set")
|
|
||||||
.parse()
|
|
||||||
.expect("VKID_CLIENT_ID must be integer"),
|
|
||||||
redirect_url: env::var("VKID_REDIRECT_URI").expect("VKID_REDIRECT_URI must be set"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Schedule {
|
|
||||||
pub fn hash(&self) -> String {
|
|
||||||
let mut hasher = DigestHasher::from(Sha1::new());
|
|
||||||
|
|
||||||
self.etag.hash(&mut hasher);
|
|
||||||
|
|
||||||
self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
|
|
||||||
self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
|
|
||||||
|
|
||||||
hasher.finalize()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Common data provided to endpoints.
|
|
||||||
pub struct AppState {
|
|
||||||
pub downloader: Mutex<BasicXlsDownloader>,
|
|
||||||
pub schedule: Mutex<Option<Schedule>>,
|
|
||||||
pub database: Mutex<PgConnection>,
|
|
||||||
pub vk_id: VkId,
|
|
||||||
pub fcm_client: Option<Mutex<FCMClient>>, // в рантайме не меняется, так что опционален мьютекс, а не данные в нём.
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AppState {
|
|
||||||
pub async fn new() -> Self {
|
|
||||||
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
|
|
||||||
|
|
||||||
Self {
|
|
||||||
downloader: Mutex::new(BasicXlsDownloader::new()),
|
|
||||||
schedule: Mutex::new(None),
|
|
||||||
database: Mutex::new(
|
|
||||||
PgConnection::establish(&database_url)
|
|
||||||
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
|
|
||||||
),
|
|
||||||
vk_id: VkId::new(),
|
|
||||||
fcm_client: if env::var("GOOGLE_APPLICATION_CREDENTIALS").is_ok() {
|
|
||||||
Some(Mutex::new(
|
|
||||||
FCMClient::new().await.expect("FCM client must be created"),
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new object web::Data<AppState>.
|
|
||||||
pub async fn app_state() -> web::Data<AppState> {
|
|
||||||
web::Data::new(AppState::new().await)
|
|
||||||
}
|
|
||||||
@@ -1,163 +0,0 @@
|
|||||||
pub mod users {
|
|
||||||
use crate::app_state::AppState;
|
|
||||||
use crate::database::models::User;
|
|
||||||
use crate::database::schema::users::dsl::users;
|
|
||||||
use crate::database::schema::users::dsl::*;
|
|
||||||
use crate::utility::mutex::MutexScope;
|
|
||||||
use actix_web::web;
|
|
||||||
use diesel::{ExpressionMethods, QueryResult, insert_into};
|
|
||||||
use diesel::{QueryDsl, RunQueryDsl};
|
|
||||||
use diesel::{SaveChangesDsl, SelectableHelper};
|
|
||||||
|
|
||||||
pub fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
users
|
|
||||||
.filter(id.eq(_id))
|
|
||||||
.select(User::as_select())
|
|
||||||
.first(conn)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_by_username(state: &web::Data<AppState>, _username: &String) -> QueryResult<User> {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
users
|
|
||||||
.filter(username.eq(_username))
|
|
||||||
.select(User::as_select())
|
|
||||||
.first(conn)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
//noinspection RsTraitObligations
|
|
||||||
pub fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
users
|
|
||||||
.filter(vk_id.eq(_vk_id))
|
|
||||||
.select(User::as_select())
|
|
||||||
.first(conn)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
//noinspection DuplicatedCode
|
|
||||||
pub fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
|
|
||||||
// и как это нахуй сократить блять примеров нихуя нет, нихуя не работает
|
|
||||||
// как меня этот раст заебал уже
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
match users
|
|
||||||
.filter(username.eq(_username))
|
|
||||||
.count()
|
|
||||||
.get_result::<i64>(conn)
|
|
||||||
{
|
|
||||||
Ok(count) => count > 0,
|
|
||||||
Err(_) => false,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
//noinspection DuplicatedCode
|
|
||||||
//noinspection RsTraitObligations
|
|
||||||
pub fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
match users
|
|
||||||
.filter(vk_id.eq(_vk_id))
|
|
||||||
.count()
|
|
||||||
.get_result::<i64>(conn)
|
|
||||||
{
|
|
||||||
Ok(count) => count > 0,
|
|
||||||
Err(_) => false,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
|
|
||||||
state
|
|
||||||
.database
|
|
||||||
.scope(|conn| insert_into(users).values(user).execute(conn))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Function declaration [User::save][UserSave::save].
|
|
||||||
pub trait UserSave {
|
|
||||||
/// Saves the user's changes to the database.
|
|
||||||
///
|
|
||||||
/// # Arguments
|
|
||||||
///
|
|
||||||
/// * `state`: The state of the actix-web application that stores the mutex of the [connection][diesel::PgConnection].
|
|
||||||
///
|
|
||||||
/// returns: `QueryResult<User>`
|
|
||||||
///
|
|
||||||
/// # Examples
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use crate::database::driver::users;
|
|
||||||
///
|
|
||||||
/// #[derive(Deserialize)]
|
|
||||||
/// struct Params {
|
|
||||||
/// pub username: String,
|
|
||||||
/// }
|
|
||||||
///
|
|
||||||
/// #[patch("/")]
|
|
||||||
/// async fn patch_user(
|
|
||||||
/// app_state: web::Data<AppState>,
|
|
||||||
/// user: SyncExtractor<User>,
|
|
||||||
/// web::Query(params): web::Query<Params>,
|
|
||||||
/// ) -> web::Json<User> {
|
|
||||||
/// let mut user = user.into_inner();
|
|
||||||
///
|
|
||||||
/// user.username = params.username;
|
|
||||||
///
|
|
||||||
/// match user.save(&app_state) {
|
|
||||||
/// Ok(user) => web::Json(user),
|
|
||||||
/// Err(e) => {
|
|
||||||
/// eprintln!("Failed to save user: {e}");
|
|
||||||
/// panic!();
|
|
||||||
/// }
|
|
||||||
/// }
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Implementation of [UserSave][UserSave] trait.
|
|
||||||
impl UserSave for User {
|
|
||||||
fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> {
|
|
||||||
state.database.scope(|conn| self.save_changes::<Self>(conn))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
match diesel::delete(users.filter(username.eq(_username))).execute(conn) {
|
|
||||||
Ok(count) => count > 0,
|
|
||||||
Err(_) => false,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
insert_into(users)
|
|
||||||
.values(user)
|
|
||||||
.on_conflict_do_nothing()
|
|
||||||
.execute(conn)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub mod fcm {
|
|
||||||
use crate::app_state::AppState;
|
|
||||||
use crate::database::models::{FCM, User};
|
|
||||||
use crate::utility::mutex::MutexScope;
|
|
||||||
use actix_web::web;
|
|
||||||
use diesel::QueryDsl;
|
|
||||||
use diesel::RunQueryDsl;
|
|
||||||
use diesel::{BelongingToDsl, QueryResult, SelectableHelper};
|
|
||||||
|
|
||||||
pub fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> {
|
|
||||||
state.database.scope(|conn| {
|
|
||||||
FCM::belonging_to(&user)
|
|
||||||
.select(FCM::as_select())
|
|
||||||
.get_result(conn)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
pub mod schema;
|
|
||||||
pub mod models;
|
|
||||||
pub mod driver;
|
|
||||||
@@ -1,84 +0,0 @@
|
|||||||
use actix_macros::ResponderJson;
|
|
||||||
use diesel::QueryId;
|
|
||||||
use diesel::prelude::*;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use utoipa::ToSchema;
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Copy, Clone, PartialEq, Debug, Serialize, Deserialize, diesel_derive_enum::DbEnum, ToSchema,
|
|
||||||
)]
|
|
||||||
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
|
|
||||||
#[DbValueStyle = "UPPERCASE"]
|
|
||||||
#[serde(rename_all = "UPPERCASE")]
|
|
||||||
pub enum UserRole {
|
|
||||||
Student,
|
|
||||||
Teacher,
|
|
||||||
Admin,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Identifiable,
|
|
||||||
AsChangeset,
|
|
||||||
Queryable,
|
|
||||||
QueryId,
|
|
||||||
Selectable,
|
|
||||||
Serialize,
|
|
||||||
Insertable,
|
|
||||||
Debug,
|
|
||||||
ToSchema,
|
|
||||||
ResponderJson,
|
|
||||||
)]
|
|
||||||
#[diesel(table_name = crate::database::schema::users)]
|
|
||||||
#[diesel(treat_none_as_null = true)]
|
|
||||||
pub struct User {
|
|
||||||
/// Account UUID.
|
|
||||||
pub id: String,
|
|
||||||
|
|
||||||
/// User name.
|
|
||||||
pub username: String,
|
|
||||||
|
|
||||||
/// BCrypt password hash.
|
|
||||||
pub password: String,
|
|
||||||
|
|
||||||
/// ID of the linked VK account.
|
|
||||||
pub vk_id: Option<i32>,
|
|
||||||
|
|
||||||
/// JWT access token.
|
|
||||||
pub access_token: String,
|
|
||||||
|
|
||||||
/// Group.
|
|
||||||
pub group: String,
|
|
||||||
|
|
||||||
/// Role.
|
|
||||||
pub role: UserRole,
|
|
||||||
|
|
||||||
/// Version of the installed Polytechnic+ application.
|
|
||||||
pub version: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Debug,
|
|
||||||
Clone,
|
|
||||||
Serialize,
|
|
||||||
Identifiable,
|
|
||||||
Queryable,
|
|
||||||
Selectable,
|
|
||||||
Insertable,
|
|
||||||
AsChangeset,
|
|
||||||
Associations,
|
|
||||||
ToSchema,
|
|
||||||
ResponderJson,
|
|
||||||
)]
|
|
||||||
#[diesel(belongs_to(User))]
|
|
||||||
#[diesel(table_name = crate::database::schema::fcm)]
|
|
||||||
#[diesel(primary_key(user_id))]
|
|
||||||
pub struct FCM {
|
|
||||||
/// Account UUID.
|
|
||||||
pub user_id: String,
|
|
||||||
|
|
||||||
/// FCM token.
|
|
||||||
pub token: String,
|
|
||||||
|
|
||||||
/// List of topics subscribed to by the user.
|
|
||||||
pub topics: Vec<Option<String>>,
|
|
||||||
}
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
// @generated automatically by Diesel CLI.
|
|
||||||
|
|
||||||
pub mod sql_types {
|
|
||||||
#[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
|
|
||||||
#[diesel(postgres_type(name = "user_role"))]
|
|
||||||
pub struct UserRole;
|
|
||||||
}
|
|
||||||
|
|
||||||
diesel::table! {
|
|
||||||
fcm (user_id) {
|
|
||||||
user_id -> Text,
|
|
||||||
token -> Text,
|
|
||||||
topics -> Array<Nullable<Text>>,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
diesel::table! {
|
|
||||||
use diesel::sql_types::*;
|
|
||||||
use super::sql_types::UserRole;
|
|
||||||
|
|
||||||
users (id) {
|
|
||||||
id -> Text,
|
|
||||||
username -> Text,
|
|
||||||
password -> Text,
|
|
||||||
vk_id -> Nullable<Int4>,
|
|
||||||
access_token -> Text,
|
|
||||||
group -> Text,
|
|
||||||
role -> UserRole,
|
|
||||||
version -> Text,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
diesel::joinable!(fcm -> users (user_id));
|
|
||||||
|
|
||||||
diesel::allow_tables_to_appear_in_same_query!(
|
|
||||||
fcm,
|
|
||||||
users,
|
|
||||||
);
|
|
||||||
@@ -1,24 +1,24 @@
|
|||||||
use crate::app_state::AppState;
|
use crate::extractors::base::FromRequestAsync;
|
||||||
use crate::database::driver;
|
use crate::state::AppState;
|
||||||
use crate::database::models::{FCM, User};
|
use crate::utility::req_auth;
|
||||||
use crate::extractors::base::{FromRequestSync, SyncExtractor};
|
use crate::utility::req_auth::get_claims_from_req;
|
||||||
use crate::utility::jwt;
|
use actix_macros::MiddlewareError;
|
||||||
use actix_macros::ResponseErrorMessage;
|
|
||||||
use actix_web::body::BoxBody;
|
use actix_web::body::BoxBody;
|
||||||
use actix_web::dev::Payload;
|
use actix_web::dev::Payload;
|
||||||
use actix_web::http::header;
|
use actix_web::{web, HttpRequest};
|
||||||
use actix_web::{FromRequest, HttpRequest, web};
|
use database::entity::{User, UserType};
|
||||||
|
use database::query::Query;
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Display, ResponseErrorMessage)]
|
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Display, MiddlewareError)]
|
||||||
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
|
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
/// There is no Authorization header in the request.
|
/// There is no Authorization header or cookie in the request.
|
||||||
#[display("No Authorization header found")]
|
#[display("No Authorization header or cookie found")]
|
||||||
NoHeader,
|
NoHeaderOrCookieFound,
|
||||||
|
|
||||||
/// Unknown authorization type other than Bearer.
|
/// Unknown authorization type other than Bearer.
|
||||||
#[display("Bearer token is required")]
|
#[display("Bearer token is required")]
|
||||||
@@ -28,83 +28,53 @@ pub enum Error {
|
|||||||
#[display("Invalid or expired access token")]
|
#[display("Invalid or expired access token")]
|
||||||
InvalidAccessToken,
|
InvalidAccessToken,
|
||||||
|
|
||||||
|
/// Default user is required.
|
||||||
|
#[display("Non-default user type is owning this access token")]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::FORBIDDEN"]
|
||||||
|
NonDefaultUserType,
|
||||||
|
|
||||||
/// The user bound to the token is not found in the database.
|
/// The user bound to the token is not found in the database.
|
||||||
#[display("No user associated with access token")]
|
#[display("No user associated with access token")]
|
||||||
NoUser,
|
NoUser,
|
||||||
|
|
||||||
|
/// User doesn't have required role.
|
||||||
|
#[display("You don't have sufficient rights")]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::FORBIDDEN"]
|
||||||
|
InsufficientRights,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Error {
|
impl From<req_auth::Error> for Error {
|
||||||
pub fn into_err(self) -> actix_web::Error {
|
fn from(value: req_auth::Error) -> Self {
|
||||||
actix_web::Error::from(self)
|
match value {
|
||||||
|
req_auth::Error::NoHeaderOrCookieFound => Error::NoHeaderOrCookieFound,
|
||||||
|
req_auth::Error::UnknownAuthorizationType => Error::UnknownAuthorizationType,
|
||||||
|
req_auth::Error::InvalidAccessToken => Error::InvalidAccessToken,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// User extractor from request with Bearer access token.
|
/// User extractor from request with Bearer access token.
|
||||||
impl FromRequestSync for User {
|
impl FromRequestAsync for User {
|
||||||
type Error = actix_web::Error;
|
type Error = Error;
|
||||||
|
|
||||||
fn from_request_sync(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
|
async fn from_request_async(
|
||||||
let authorization = req
|
req: &HttpRequest,
|
||||||
.headers()
|
_payload: &mut Payload,
|
||||||
.get(header::AUTHORIZATION)
|
) -> Result<Self, Self::Error> {
|
||||||
.ok_or(Error::NoHeader.into_err())?
|
let claims = get_claims_from_req(req).map_err(Error::from)?;
|
||||||
.to_str()
|
|
||||||
.map_err(|_| Error::NoHeader.into_err())?
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let parts: Vec<&str> = authorization.split(' ').collect();
|
if claims.user_type.unwrap_or(UserType::Default) != UserType::Default {
|
||||||
|
return Err(Error::NonDefaultUserType);
|
||||||
if parts.len() != 2 || parts[0] != "Bearer" {
|
|
||||||
return Err(Error::UnknownAuthorizationType.into_err());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let user_id = jwt::verify_and_decode(&parts[1].to_string())
|
let db = req
|
||||||
.map_err(|_| Error::InvalidAccessToken.into_err())?;
|
.app_data::<web::Data<AppState>>()
|
||||||
|
.unwrap()
|
||||||
|
.get_database();
|
||||||
|
|
||||||
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
|
match Query::find_user_by_id(db, &claims.id).await {
|
||||||
|
Ok(Some(user)) => Ok(user),
|
||||||
driver::users::get(&app_state, &user_id).map_err(|_| Error::NoUser.into())
|
_ => Err(Error::NoUser),
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub struct UserExtractor<const FCM: bool> {
|
|
||||||
user: User,
|
|
||||||
|
|
||||||
fcm: Option<FCM>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<const FCM: bool> UserExtractor<{ FCM }> {
|
|
||||||
pub fn user(&self) -> &User {
|
|
||||||
&self.user
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn fcm(&self) -> &Option<FCM> {
|
|
||||||
if !FCM {
|
|
||||||
panic!("FCM marked as not required, but it has been requested")
|
|
||||||
}
|
|
||||||
|
|
||||||
&self.fcm
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Extractor of user and additional parameters from request with Bearer token.
|
|
||||||
impl<const FCM: bool> FromRequestSync for UserExtractor<{ FCM }> {
|
|
||||||
type Error = actix_web::Error;
|
|
||||||
|
|
||||||
fn from_request_sync(req: &HttpRequest, payload: &mut Payload) -> Result<Self, Self::Error> {
|
|
||||||
let user = SyncExtractor::<User>::from_request(req, payload)
|
|
||||||
.into_inner()?
|
|
||||||
.into_inner();
|
|
||||||
|
|
||||||
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
fcm: if FCM {
|
|
||||||
driver::fcm::from_user(&app_state, &user).ok()
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
},
|
|
||||||
user,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ use std::future::{Ready, ready};
|
|||||||
use std::ops;
|
use std::ops;
|
||||||
|
|
||||||
/// # Async extractor.
|
/// # Async extractor.
|
||||||
|
|
||||||
/// Asynchronous object extractor from a query.
|
/// Asynchronous object extractor from a query.
|
||||||
pub struct AsyncExtractor<T>(T);
|
pub struct AsyncExtractor<T>(T);
|
||||||
|
|
||||||
@@ -57,18 +56,22 @@ pub trait FromRequestAsync: Sized {
|
|||||||
/// web::Json(user)
|
/// web::Json(user)
|
||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
async fn from_request_async(req: HttpRequest, payload: Payload) -> Result<Self, Self::Error>;
|
async fn from_request_async(
|
||||||
|
req: &HttpRequest,
|
||||||
|
payload: &mut Payload,
|
||||||
|
) -> Result<Self, Self::Error>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
|
impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
|
||||||
type Error = T::Error;
|
type Error = T::Error;
|
||||||
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
||||||
|
|
||||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
|
||||||
let req = req.clone();
|
let req = req.clone();
|
||||||
let payload = payload.take();
|
let mut payload = Payload::None;
|
||||||
|
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
T::from_request_async(req, payload)
|
T::from_request_async(&req, &mut payload)
|
||||||
.await
|
.await
|
||||||
.map(|res| Self(res))
|
.map(|res| Self(res))
|
||||||
})
|
})
|
||||||
@@ -76,12 +79,12 @@ impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// # Sync extractor.
|
/// # Sync extractor.
|
||||||
|
|
||||||
/// Synchronous object extractor from a query.
|
/// Synchronous object extractor from a query.
|
||||||
pub struct SyncExtractor<T>(T);
|
pub struct SyncExtractor<T>(T);
|
||||||
|
|
||||||
impl<T> SyncExtractor<T> {
|
impl<T> SyncExtractor<T> {
|
||||||
/// Retrieving an object extracted with the extractor.
|
/// Retrieving an object extracted with the extractor.
|
||||||
|
#[allow(unused)]
|
||||||
pub fn into_inner(self) -> T {
|
pub fn into_inner(self) -> T {
|
||||||
self.0
|
self.0
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
pub mod parser;
|
|
||||||
101
src/main.rs
101
src/main.rs
@@ -1,20 +1,17 @@
|
|||||||
use crate::app_state::{AppState, app_state};
|
use crate::middlewares::authorization::{JWTAuthorizationBuilder, ServiceConfig};
|
||||||
use crate::middlewares::authorization::JWTAuthorization;
|
|
||||||
use crate::middlewares::content_type::ContentTypeBootstrap;
|
use crate::middlewares::content_type::ContentTypeBootstrap;
|
||||||
|
use crate::state::{new_app_state, AppState};
|
||||||
use actix_web::dev::{ServiceFactory, ServiceRequest};
|
use actix_web::dev::{ServiceFactory, ServiceRequest};
|
||||||
use actix_web::{App, Error, HttpServer};
|
use actix_web::{App, Error, HttpServer};
|
||||||
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
use dotenvy::dotenv;
|
use dotenvy::dotenv;
|
||||||
|
use log::info;
|
||||||
use std::io;
|
use std::io;
|
||||||
use utoipa_actix_web::AppExt;
|
|
||||||
use utoipa_actix_web::scope::Scope;
|
use utoipa_actix_web::scope::Scope;
|
||||||
|
use utoipa_actix_web::AppExt;
|
||||||
use utoipa_rapidoc::RapiDoc;
|
use utoipa_rapidoc::RapiDoc;
|
||||||
|
|
||||||
mod app_state;
|
mod state;
|
||||||
|
|
||||||
mod database;
|
|
||||||
|
|
||||||
mod parser;
|
|
||||||
mod xls_downloader;
|
|
||||||
|
|
||||||
mod extractors;
|
mod extractors;
|
||||||
mod middlewares;
|
mod middlewares;
|
||||||
@@ -30,6 +27,22 @@ pub fn get_api_scope<
|
|||||||
>(
|
>(
|
||||||
scope: I,
|
scope: I,
|
||||||
) -> Scope<T> {
|
) -> Scope<T> {
|
||||||
|
let admin_scope = {
|
||||||
|
let service_user_scope =
|
||||||
|
utoipa_actix_web::scope("/service-users").service(routes::admin::service_users::create);
|
||||||
|
|
||||||
|
utoipa_actix_web::scope("/admin")
|
||||||
|
.wrap(
|
||||||
|
JWTAuthorizationBuilder::new()
|
||||||
|
.with_default(Some(ServiceConfig {
|
||||||
|
allow_service: false,
|
||||||
|
user_roles: Some(&[UserRole::Admin]),
|
||||||
|
}))
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
|
.service(service_user_scope)
|
||||||
|
};
|
||||||
|
|
||||||
let auth_scope = utoipa_actix_web::scope("/auth")
|
let auth_scope = utoipa_actix_web::scope("/auth")
|
||||||
.service(routes::auth::sign_in)
|
.service(routes::auth::sign_in)
|
||||||
.service(routes::auth::sign_in_vk)
|
.service(routes::auth::sign_in_vk)
|
||||||
@@ -37,43 +50,83 @@ pub fn get_api_scope<
|
|||||||
.service(routes::auth::sign_up_vk);
|
.service(routes::auth::sign_up_vk);
|
||||||
|
|
||||||
let users_scope = utoipa_actix_web::scope("/users")
|
let users_scope = utoipa_actix_web::scope("/users")
|
||||||
.wrap(JWTAuthorization::default())
|
.wrap(
|
||||||
|
JWTAuthorizationBuilder::new()
|
||||||
|
.add_paths(
|
||||||
|
["/by/id/{id}", "/by/telegram-id/{id}"],
|
||||||
|
Some(ServiceConfig {
|
||||||
|
allow_service: true,
|
||||||
|
user_roles: Some(&[UserRole::Admin]),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
|
.service(
|
||||||
|
utoipa_actix_web::scope("/by")
|
||||||
|
.service(routes::users::by::by_id)
|
||||||
|
.service(routes::users::by::by_telegram_id),
|
||||||
|
)
|
||||||
.service(routes::users::change_group)
|
.service(routes::users::change_group)
|
||||||
.service(routes::users::change_username)
|
.service(routes::users::change_username)
|
||||||
.service(routes::users::me);
|
.service(routes::users::me);
|
||||||
|
|
||||||
let schedule_scope = utoipa_actix_web::scope("/schedule")
|
let schedule_scope = utoipa_actix_web::scope("/schedule")
|
||||||
.wrap(JWTAuthorization {
|
.wrap(
|
||||||
ignore: &["/group-names", "/teacher-names"],
|
JWTAuthorizationBuilder::new()
|
||||||
})
|
.with_default(Some(ServiceConfig {
|
||||||
.service(routes::schedule::schedule)
|
allow_service: true,
|
||||||
.service(routes::schedule::update_download_url)
|
user_roles: None,
|
||||||
|
}))
|
||||||
|
.add_paths(["/group-names", "/teacher-names"], None)
|
||||||
|
.add_paths(
|
||||||
|
["/"],
|
||||||
|
Some(ServiceConfig {
|
||||||
|
allow_service: true,
|
||||||
|
user_roles: Some(&[UserRole::Admin]),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.add_paths(
|
||||||
|
["/group"],
|
||||||
|
Some(ServiceConfig {
|
||||||
|
allow_service: false,
|
||||||
|
user_roles: None,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
.service(routes::schedule::cache_status)
|
.service(routes::schedule::cache_status)
|
||||||
|
.service(routes::schedule::schedule)
|
||||||
.service(routes::schedule::group)
|
.service(routes::schedule::group)
|
||||||
|
.service(routes::schedule::group_by_name)
|
||||||
.service(routes::schedule::group_names)
|
.service(routes::schedule::group_names)
|
||||||
.service(routes::schedule::teacher)
|
.service(routes::schedule::teacher)
|
||||||
.service(routes::schedule::teacher_names);
|
.service(routes::schedule::teacher_names);
|
||||||
|
|
||||||
let fcm_scope = utoipa_actix_web::scope("/fcm")
|
let flow_scope = utoipa_actix_web::scope("/flow")
|
||||||
.wrap(JWTAuthorization::default())
|
.wrap(
|
||||||
.service(routes::fcm::update_callback)
|
JWTAuthorizationBuilder::new()
|
||||||
.service(routes::fcm::set_token);
|
.add_paths(["/telegram-auth"], None)
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
|
.service(routes::flow::telegram_auth)
|
||||||
|
.service(routes::flow::telegram_complete);
|
||||||
|
|
||||||
let vk_id_scope = utoipa_actix_web::scope("/vkid") //
|
let vk_id_scope = utoipa_actix_web::scope("/vkid") //
|
||||||
.service(routes::vk_id::oauth);
|
.service(routes::vk_id::oauth);
|
||||||
|
|
||||||
utoipa_actix_web::scope(scope)
|
utoipa_actix_web::scope(scope)
|
||||||
|
.service(admin_scope)
|
||||||
.service(auth_scope)
|
.service(auth_scope)
|
||||||
.service(users_scope)
|
.service(users_scope)
|
||||||
.service(schedule_scope)
|
.service(schedule_scope)
|
||||||
.service(fcm_scope)
|
.service(flow_scope)
|
||||||
.service(vk_id_scope)
|
.service(vk_id_scope)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn async_main() -> io::Result<()> {
|
async fn async_main() -> io::Result<()> {
|
||||||
println!("Starting server...");
|
info!("Запуск сервера...");
|
||||||
|
|
||||||
let app_state = app_state().await;
|
let app_state = new_app_state(None).await.unwrap();
|
||||||
|
|
||||||
HttpServer::new(move || {
|
HttpServer::new(move || {
|
||||||
let (app, api) = App::new()
|
let (app, api) = App::new()
|
||||||
@@ -112,9 +165,7 @@ fn main() -> io::Result<()> {
|
|||||||
},
|
},
|
||||||
));
|
));
|
||||||
|
|
||||||
unsafe { std::env::set_var("RUST_BACKTRACE", "1") };
|
let _ = dotenv();
|
||||||
|
|
||||||
dotenv().unwrap();
|
|
||||||
|
|
||||||
env_logger::init();
|
env_logger::init();
|
||||||
|
|
||||||
|
|||||||
@@ -1,27 +1,73 @@
|
|||||||
use crate::database::models::User;
|
|
||||||
use crate::extractors::authorized_user;
|
use crate::extractors::authorized_user;
|
||||||
use crate::extractors::base::FromRequestSync;
|
use crate::state::AppState;
|
||||||
|
use crate::utility::req_auth::get_claims_from_req;
|
||||||
use actix_web::body::{BoxBody, EitherBody};
|
use actix_web::body::{BoxBody, EitherBody};
|
||||||
use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
|
use actix_web::dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform};
|
||||||
use actix_web::{Error, HttpRequest, ResponseError};
|
use actix_web::{web, Error, HttpRequest, ResponseError};
|
||||||
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::UserType;
|
||||||
|
use database::query::Query;
|
||||||
use futures_util::future::LocalBoxFuture;
|
use futures_util::future::LocalBoxFuture;
|
||||||
use std::future::{Ready, ready};
|
use std::future::{ready, Ready};
|
||||||
|
use std::ops::Deref;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
#[derive(Default, Clone)]
|
||||||
|
pub struct ServiceConfig {
|
||||||
|
/// Allow service users to access endpoints.
|
||||||
|
pub allow_service: bool,
|
||||||
|
|
||||||
|
/// List of required roles to access endpoints.
|
||||||
|
pub user_roles: Option<&'static [UserRole]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
type ServiceKV = (Arc<[&'static str]>, Option<ServiceConfig>);
|
||||||
|
|
||||||
|
pub struct JWTAuthorizationBuilder {
|
||||||
|
pub default_config: Option<ServiceConfig>,
|
||||||
|
pub path_configs: Vec<ServiceKV>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JWTAuthorizationBuilder {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
JWTAuthorizationBuilder {
|
||||||
|
default_config: Some(ServiceConfig::default()),
|
||||||
|
path_configs: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_default(mut self, default: Option<ServiceConfig>) -> Self {
|
||||||
|
self.default_config = default;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_paths(
|
||||||
|
mut self,
|
||||||
|
paths: impl AsRef<[&'static str]>,
|
||||||
|
config: Option<ServiceConfig>,
|
||||||
|
) -> Self {
|
||||||
|
self.path_configs.push((Arc::from(paths.as_ref()), config));
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build(self) -> JWTAuthorization {
|
||||||
|
JWTAuthorization {
|
||||||
|
default_config: Arc::new(self.default_config),
|
||||||
|
path_configs: Arc::from(self.path_configs),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Middleware guard working with JWT tokens.
|
/// Middleware guard working with JWT tokens.
|
||||||
pub struct JWTAuthorization {
|
pub struct JWTAuthorization {
|
||||||
/// List of ignored endpoints.
|
pub default_config: Arc<Option<ServiceConfig>>,
|
||||||
pub ignore: &'static [&'static str],
|
pub path_configs: Arc<[ServiceKV]>,
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for JWTAuthorization {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self { ignore: &[] }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
|
impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
|
||||||
where
|
where
|
||||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
|
||||||
S::Future: 'static,
|
S::Future: 'static,
|
||||||
B: 'static,
|
B: 'static,
|
||||||
{
|
{
|
||||||
@@ -33,16 +79,18 @@ where
|
|||||||
|
|
||||||
fn new_transform(&self, service: S) -> Self::Future {
|
fn new_transform(&self, service: S) -> Self::Future {
|
||||||
ready(Ok(JWTAuthorizationMiddleware {
|
ready(Ok(JWTAuthorizationMiddleware {
|
||||||
service,
|
service: Rc::new(service),
|
||||||
ignore: self.ignore,
|
default_config: self.default_config.clone(),
|
||||||
|
path_configs: self.path_configs.clone(),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct JWTAuthorizationMiddleware<S> {
|
pub struct JWTAuthorizationMiddleware<S> {
|
||||||
service: S,
|
service: Rc<S>,
|
||||||
/// List of ignored endpoints.
|
|
||||||
ignore: &'static [&'static str],
|
default_config: Arc<Option<ServiceConfig>>,
|
||||||
|
path_configs: Arc<[ServiceKV]>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S, B> JWTAuthorizationMiddleware<S>
|
impl<S, B> JWTAuthorizationMiddleware<S>
|
||||||
@@ -52,36 +100,74 @@ where
|
|||||||
B: 'static,
|
B: 'static,
|
||||||
{
|
{
|
||||||
/// Checking the validity of the token.
|
/// Checking the validity of the token.
|
||||||
fn check_authorization(
|
async fn check_authorization(
|
||||||
&self,
|
|
||||||
req: &HttpRequest,
|
req: &HttpRequest,
|
||||||
payload: &mut Payload,
|
allow_service_user: bool,
|
||||||
|
required_user_roles: Option<&'static [UserRole]>,
|
||||||
) -> Result<(), authorized_user::Error> {
|
) -> Result<(), authorized_user::Error> {
|
||||||
User::from_request_sync(req, payload)
|
let claims = get_claims_from_req(req).map_err(authorized_user::Error::from)?;
|
||||||
.map(|_| ())
|
|
||||||
.map_err(|e| e.as_error::<authorized_user::Error>().unwrap().clone())
|
let db = req
|
||||||
|
.app_data::<web::Data<AppState>>()
|
||||||
|
.unwrap()
|
||||||
|
.get_database();
|
||||||
|
|
||||||
|
let user_type = claims.user_type.unwrap_or(UserType::Default);
|
||||||
|
|
||||||
|
match user_type {
|
||||||
|
UserType::Default => {
|
||||||
|
if let Some(required_user_roles) = required_user_roles {
|
||||||
|
let Ok(Some(user)) = Query::find_user_by_id(db, &claims.id).await else {
|
||||||
|
return Err(authorized_user::Error::NoUser);
|
||||||
|
};
|
||||||
|
|
||||||
|
if !required_user_roles.contains(&user.role) {
|
||||||
|
return Err(authorized_user::Error::InsufficientRights);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn should_skip(&self, req: &ServiceRequest) -> bool {
|
return Ok(());
|
||||||
let path = req.match_info().unprocessed();
|
|
||||||
|
|
||||||
self.ignore.iter().any(|ignore| {
|
|
||||||
if !path.starts_with(ignore) {
|
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(other) = path.as_bytes().iter().nth(ignore.len()) {
|
match Query::is_user_exists_by_id(db, &claims.id).await {
|
||||||
return ['?' as u8, '/' as u8].contains(other);
|
Ok(true) => Ok(()),
|
||||||
|
_ => Err(authorized_user::Error::NoUser),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
UserType::Service => {
|
||||||
|
if !allow_service_user {
|
||||||
|
return Err(authorized_user::Error::NonDefaultUserType);
|
||||||
}
|
}
|
||||||
|
|
||||||
true
|
match Query::is_service_user_exists_by_id(db, &claims.id).await {
|
||||||
})
|
Ok(true) => Ok(()),
|
||||||
|
_ => Err(authorized_user::Error::NoUser),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_config(
|
||||||
|
current_path: &str,
|
||||||
|
per_route: &[ServiceKV],
|
||||||
|
default: &Option<ServiceConfig>,
|
||||||
|
) -> Option<ServiceConfig> {
|
||||||
|
for (service_paths, config) in per_route {
|
||||||
|
for service_path in service_paths.deref() {
|
||||||
|
if !service_path.eq(¤t_path) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
return config.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
default.clone()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S>
|
impl<S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S>
|
||||||
where
|
where
|
||||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
|
||||||
S::Future: 'static,
|
S::Future: 'static,
|
||||||
B: 'static,
|
B: 'static,
|
||||||
{
|
{
|
||||||
@@ -92,25 +178,42 @@ where
|
|||||||
forward_ready!(service);
|
forward_ready!(service);
|
||||||
|
|
||||||
fn call(&self, req: ServiceRequest) -> Self::Future {
|
fn call(&self, req: ServiceRequest) -> Self::Future {
|
||||||
if self.should_skip(&req) {
|
let service = Rc::clone(&self.service);
|
||||||
|
|
||||||
|
let match_info = req.match_info();
|
||||||
|
let path = if let Some(pattern) = req.match_pattern() {
|
||||||
|
let scope_start_idx = match_info
|
||||||
|
.as_str()
|
||||||
|
.find(match_info.unprocessed())
|
||||||
|
.unwrap_or(0);
|
||||||
|
|
||||||
|
pattern.as_str().split_at(scope_start_idx).1.to_owned()
|
||||||
|
} else {
|
||||||
|
match_info.unprocessed().to_owned()
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(config) = Self::find_config(&path, &self.path_configs, &self.default_config)
|
||||||
|
else {
|
||||||
let fut = self.service.call(req);
|
let fut = self.service.call(req);
|
||||||
return Box::pin(async move { Ok(fut.await?.map_into_left_body()) });
|
return Box::pin(async move { Ok(fut.await?.map_into_left_body()) });
|
||||||
|
};
|
||||||
|
|
||||||
|
let allow_service_user = config.allow_service;
|
||||||
|
let required_user_roles = config.user_roles;
|
||||||
|
|
||||||
|
Box::pin(async move {
|
||||||
|
match Self::check_authorization(req.request(), allow_service_user, required_user_roles)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(_) => {
|
||||||
|
let fut = service.call(req).await?;
|
||||||
|
Ok(fut.map_into_left_body())
|
||||||
}
|
}
|
||||||
|
Err(err) => Ok(ServiceResponse::new(
|
||||||
let (http_req, mut payload) = req.into_parts();
|
req.into_parts().0,
|
||||||
|
|
||||||
if let Err(err) = self.check_authorization(&http_req, &mut payload) {
|
|
||||||
return Box::pin(async move {
|
|
||||||
Ok(ServiceResponse::new(
|
|
||||||
http_req,
|
|
||||||
err.error_response().map_into_right_body(),
|
err.error_response().map_into_right_body(),
|
||||||
))
|
)),
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
})
|
||||||
let req = ServiceRequest::from_parts(http_req, payload);
|
|
||||||
let fut = self.service.call(req);
|
|
||||||
|
|
||||||
Box::pin(async move { Ok(fut.await?.map_into_left_body()) })
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
use actix_web::Error;
|
|
||||||
use actix_web::body::{BoxBody, EitherBody};
|
use actix_web::body::{BoxBody, EitherBody};
|
||||||
use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
|
use actix_web::dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform};
|
||||||
use actix_web::http::header;
|
use actix_web::http::header;
|
||||||
use actix_web::http::header::HeaderValue;
|
use actix_web::http::header::HeaderValue;
|
||||||
|
use actix_web::Error;
|
||||||
use futures_util::future::LocalBoxFuture;
|
use futures_util::future::LocalBoxFuture;
|
||||||
use std::future::{Ready, ready};
|
use std::future::{ready, Ready};
|
||||||
|
|
||||||
/// Middleware to specify the encoding in the Content-Type header.
|
/// Middleware to specify the encoding in the Content-Type header.
|
||||||
pub struct ContentTypeBootstrap;
|
pub struct ContentTypeBootstrap;
|
||||||
@@ -30,7 +30,7 @@ pub struct ContentTypeMiddleware<S> {
|
|||||||
service: S,
|
service: S,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
|
impl<S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
|
||||||
where
|
where
|
||||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||||
S::Future: 'static,
|
S::Future: 'static,
|
||||||
@@ -49,14 +49,15 @@ where
|
|||||||
let mut response = fut.await?;
|
let mut response = fut.await?;
|
||||||
|
|
||||||
let headers = response.response_mut().headers_mut();
|
let headers = response.response_mut().headers_mut();
|
||||||
if let Some(content_type) = headers.get("Content-Type") {
|
|
||||||
if content_type == "application/json" {
|
if let Some(content_type) = headers.get("Content-Type")
|
||||||
|
&& content_type == "application/json"
|
||||||
|
{
|
||||||
headers.insert(
|
headers.insert(
|
||||||
header::CONTENT_TYPE,
|
header::CONTENT_TYPE,
|
||||||
HeaderValue::from_static("application/json; charset=utf8"),
|
HeaderValue::from_static("application/json; charset=utf8"),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
Ok(response.map_into_left_body())
|
Ok(response.map_into_left_body())
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,15 +1,15 @@
|
|||||||
use std::fmt::{Write};
|
|
||||||
use std::fmt::Display;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::fmt::Display;
|
||||||
|
use std::fmt::Write;
|
||||||
|
|
||||||
/// Server response to errors within Middleware.
|
/// Server response to errors within Middleware.
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
pub struct ResponseErrorMessage<T: Display> {
|
pub struct MiddlewareError<T: Display> {
|
||||||
code: T,
|
code: T,
|
||||||
message: String,
|
message: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Display + Serialize> ResponseErrorMessage<T> {
|
impl<T: Display + Serialize> MiddlewareError<T> {
|
||||||
pub fn new(code: T) -> Self {
|
pub fn new(code: T) -> Self {
|
||||||
let mut message = String::new();
|
let mut message = String::new();
|
||||||
write!(&mut message, "{}", code).unwrap();
|
write!(&mut message, "{}", code).unwrap();
|
||||||
@@ -1,2 +1,4 @@
|
|||||||
|
pub mod error;
|
||||||
|
|
||||||
pub mod authorization;
|
pub mod authorization;
|
||||||
pub mod content_type;
|
pub mod content_type;
|
||||||
@@ -1,743 +0,0 @@
|
|||||||
use crate::parser::LessonParseResult::{Lessons, Street};
|
|
||||||
use crate::parser::schema::LessonType::Break;
|
|
||||||
use crate::parser::schema::{
|
|
||||||
Day, ErrorCell, ErrorCellPos, Lesson, LessonSubGroup, LessonTime, LessonType, ParseError,
|
|
||||||
ParseResult, ScheduleEntry,
|
|
||||||
};
|
|
||||||
use calamine::{Reader, Xls, open_workbook_from_rs};
|
|
||||||
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
|
|
||||||
use fuzzy_matcher::FuzzyMatcher;
|
|
||||||
use fuzzy_matcher::skim::SkimMatcherV2;
|
|
||||||
use regex::Regex;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::io::Cursor;
|
|
||||||
use std::sync::LazyLock;
|
|
||||||
|
|
||||||
pub mod schema;
|
|
||||||
|
|
||||||
/// Data cell storing the line.
|
|
||||||
struct InternalId {
|
|
||||||
/// Line index.
|
|
||||||
row: u32,
|
|
||||||
|
|
||||||
/// Column index.
|
|
||||||
column: u32,
|
|
||||||
|
|
||||||
/// Text in the cell.
|
|
||||||
name: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Data on the time of lessons from the second column of the schedule.
|
|
||||||
struct InternalTime {
|
|
||||||
/// Temporary segment of the lesson.
|
|
||||||
time_range: LessonTime,
|
|
||||||
|
|
||||||
/// Type of lesson.
|
|
||||||
lesson_type: LessonType,
|
|
||||||
|
|
||||||
/// The lesson index.
|
|
||||||
default_index: Option<u32>,
|
|
||||||
|
|
||||||
/// The frame of the cell.
|
|
||||||
xls_range: ((u32, u32), (u32, u32)),
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Working sheet type alias.
|
|
||||||
type WorkSheet = calamine::Range<calamine::Data>;
|
|
||||||
|
|
||||||
/// Getting a line from the required cell.
|
|
||||||
fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<String> {
|
|
||||||
let cell_data = if let Some(data) = worksheet.get((row as usize, col as usize)) {
|
|
||||||
data.to_string()
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
};
|
|
||||||
|
|
||||||
if cell_data.trim().is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
static NL_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\n\r]+").unwrap());
|
|
||||||
static SP_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
|
|
||||||
|
|
||||||
let trimmed_data = SP_RE
|
|
||||||
.replace_all(&NL_RE.replace_all(&cell_data, " "), " ")
|
|
||||||
.trim()
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
if trimmed_data.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(trimmed_data)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Obtaining the boundaries of the cell along its upper left coordinate.
|
|
||||||
fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
|
|
||||||
let worksheet_end = worksheet.end().unwrap();
|
|
||||||
|
|
||||||
let row_end: u32 = {
|
|
||||||
let mut r: u32 = 0;
|
|
||||||
|
|
||||||
for _r in (row + 1)..worksheet_end.0 {
|
|
||||||
r = _r;
|
|
||||||
|
|
||||||
if let Some(_) = worksheet.get((_r as usize, column as usize)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
r
|
|
||||||
};
|
|
||||||
|
|
||||||
let column_end: u32 = {
|
|
||||||
let mut c: u32 = 0;
|
|
||||||
|
|
||||||
for _c in (column + 1)..worksheet_end.1 {
|
|
||||||
c = _c;
|
|
||||||
|
|
||||||
if let Some(_) = worksheet.get((row as usize, _c as usize)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
c
|
|
||||||
};
|
|
||||||
|
|
||||||
((row, column), (row_end, column_end))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Obtaining a "skeleton" schedule from the working sheet.
///
/// Walks column 0 looking for day headers (a name plus a date, e.g.
/// "Понедельник 01.09.2025" — see the date parsing in `parse_xls`). When the
/// first day header is found, the row directly above it is scanned for group
/// names (columns `start.1 + 2 ..= end.1`).
///
/// Returns `(days, groups)` as cell coordinates with their names, or
/// `ParseError::UnknownWorkSheetRange` when the sheet reports no bounds.
fn parse_skeleton(worksheet: &WorkSheet) -> Result<(Vec<InternalId>, Vec<InternalId>), ParseError> {
    let range = &worksheet;

    // Set once the group header row has been processed (done only for the
    // first day header encountered).
    let mut is_parsed = false;

    let mut groups: Vec<InternalId> = Vec::new();
    let mut days: Vec<InternalId> = Vec::new();

    let start = range.start().ok_or(ParseError::UnknownWorkSheetRange)?;
    let end = range.end().ok_or(ParseError::UnknownWorkSheetRange)?;

    let mut row = start.0;
    while row < end.0 {
        row += 1;

        // Day headers live in column 0; skip rows that have none.
        let day_name_opt = get_string_from_cell(&worksheet, row, 0);
        if day_name_opt.is_none() {
            continue;
        }

        let day_name = day_name_opt.unwrap();

        if !is_parsed {
            is_parsed = true;

            // Group names sit one row above the first day header.
            row -= 1;

            for column in (start.1 + 2)..=end.1 {
                let group_name = get_string_from_cell(&worksheet, row, column);
                if group_name.is_none() {
                    continue;
                }

                groups.push(InternalId {
                    row,
                    column,
                    name: group_name.unwrap(),
                });
            }

            // Return to the day header row before recording the day.
            row += 1;
        }

        days.push(InternalId {
            row,
            column: 0,
            name: day_name.clone(),
        });

        // Saturday ends the schedule week.
        // NOTE(review): the `days.len() > 2` guard presumably protects
        // against a premature "Суббота" match near the sheet top — confirm.
        if days.len() > 2 && day_name.starts_with("Суббота") {
            break;
        }
    }

    Ok((days, groups))
}
|
|
||||||
|
|
||||||
/// The result of obtaining a lesson from the cell.
enum LessonParseResult {
    /// List of lessons long from one to two.
    ///
    /// The number of lessons will be equal to one if the couple is the first
    /// in the day; otherwise a synthetic break lesson followed by the lesson
    /// itself is returned (see `parse_lesson`).
    Lessons(Vec<Lesson>),

    /// An address found in the cell instead of a lesson: the street on which
    /// the (other) corps is located. Stored into `Day::street` by the caller.
    Street(String),
}
|
|
||||||
|
|
||||||
/// Helper for cutting a character range out of a string.
trait StringInnerSlice {
    /// Returns a copy of the string with the characters in `[from, to)`
    /// removed. Indices are character positions, not byte offsets.
    fn inner_slice(&self, from: usize, to: usize) -> Self;
}

impl StringInnerSlice for String {
    fn inner_slice(&self, from: usize, to: usize) -> Self {
        // Everything before `from`, followed by everything from `to` onwards.
        let head = self.chars().take(from);
        let tail = self.chars().skip(to);

        head.chain(tail).collect()
    }
}
|
|
||||||
|
|
||||||
// noinspection GrazieInspection
|
|
||||||
/// Obtaining a non-standard type of lesson by name.
|
|
||||||
fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
|
|
||||||
let map: HashMap<String, LessonType> = HashMap::from([
|
|
||||||
("(консультация)".to_string(), LessonType::Consultation),
|
|
||||||
(
|
|
||||||
"самостоятельная работа".to_string(),
|
|
||||||
LessonType::IndependentWork,
|
|
||||||
),
|
|
||||||
("зачет".to_string(), LessonType::Exam),
|
|
||||||
("зачет с оценкой".to_string(), LessonType::ExamWithGrade),
|
|
||||||
("экзамен".to_string(), LessonType::ExamDefault),
|
|
||||||
]);
|
|
||||||
|
|
||||||
let matcher = SkimMatcherV2::default();
|
|
||||||
let name_lower = name.to_lowercase();
|
|
||||||
|
|
||||||
type SearchResult<'a> = (&'a LessonType, i64, Vec<usize>);
|
|
||||||
|
|
||||||
let mut search_results: Vec<SearchResult> = map
|
|
||||||
.iter()
|
|
||||||
.map(|entry| -> SearchResult {
|
|
||||||
if let Some((score, indices)) = matcher.fuzzy_indices(&*name_lower, entry.0) {
|
|
||||||
return (entry.1, score, indices);
|
|
||||||
}
|
|
||||||
|
|
||||||
(entry.1, 0, Vec::new())
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
search_results.sort_by(|a, b| b.1.cmp(&a.1));
|
|
||||||
|
|
||||||
let guessed_type = search_results.first().unwrap();
|
|
||||||
|
|
||||||
if guessed_type.1 > 80 {
|
|
||||||
Some((
|
|
||||||
name.inner_slice(guessed_type.2[0], guessed_type.2[guessed_type.2.len() - 1]),
|
|
||||||
guessed_type.0.clone(),
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Getting a pair or street from a cell.
///
/// Reads the cell at (`time`'s top row, `column`). Outcomes:
/// * empty cell → `Lessons([])`;
/// * a short "Улица, NN"-like value → `Street(..)` (day held at another corps);
/// * otherwise a lesson, optionally preceded by a synthetic `Break` lesson
///   when the day already contains earlier lessons.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &mut Day,
    day_times: &Vec<InternalTime>,
    time: &InternalTime,
    column: u32,
) -> Result<LessonParseResult, ParseError> {
    // Top row of the time slot's merged cell range.
    let row = time.xls_range.0.0;

    let (name, lesson_type) = {
        let raw_name_opt = get_string_from_cell(&worksheet, row, column);
        if raw_name_opt.is_none() {
            return Ok(Lessons(Vec::new()));
        }

        let raw_name = raw_name_opt.unwrap();

        // Matches address-like values, e.g. "Пушкина, 12".
        static OTHER_STREET_RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+,?\s?[0-9]+$").unwrap());

        if OTHER_STREET_RE.is_match(&raw_name) {
            return Ok(Street(raw_name));
        }

        // Non-standard lessons (exam, consultation, …) are detected from the
        // cell text itself; otherwise the time slot's default type applies.
        if let Some(guess) = guess_lesson_type(&raw_name) {
            guess
        } else {
            (raw_name, time.lesson_type.clone())
        }
    };

    let (default_range, lesson_time) = || -> Result<(Option<[u8; 2]>, LessonTime), ParseError> {
        // check if multi-lesson
        let cell_range = get_merge_from_start(worksheet, row, column);

        // A lesson cell may span several consecutive time slots: find the
        // slot whose merged range ends where this cell's range ends.
        let end_time_arr = day_times
            .iter()
            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
            .collect::<Vec<&InternalTime>>();

        let end_time = end_time_arr
            .first()
            .ok_or(ParseError::LessonTimeNotFound(ErrorCellPos { row, column }))?;

        // Pair-index range [first, last]; only regular ("пара") slots carry
        // a default index.
        let range: Option<[u8; 2]> = if time.default_index != None {
            let default = time.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };

        let time = LessonTime {
            start: time.time_range.start,
            end: end_time.time_range.end,
        };

        Ok((range, time))
    }()?;

    let (name, mut subgroups) = parse_name_and_subgroups(&name)?;

    {
        // Cabinets are listed in the column right after the lesson cell.
        let cabinets: Vec<String> = parse_cabinets(worksheet, row, column + 1);

        // A single cabinet applies to every subgroup.
        if cabinets.len() == 1 {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(cabinets.get(0).or(Some(&String::new())).unwrap().clone())
            }
        }
        // One cabinet per subgroup: assign by the subgroup's own number.
        else if cabinets.len() == subgroups.len() {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(
                    cabinets
                        .get((subgroup.number - 1) as usize)
                        .unwrap()
                        .clone(),
                );
            }
        }
        // More cabinets than subgroups: fill existing ones in order, then add
        // placeholder subgroups for the extras.
        else if cabinets.len() > subgroups.len() {
            for index in 0..subgroups.len() {
                subgroups[index].cabinet = Some(cabinets[index].clone());
            }

            while cabinets.len() > subgroups.len() {
                subgroups.push(LessonSubGroup {
                    number: (subgroups.len() + 1) as u8,
                    cabinet: Some(cabinets[subgroups.len()].clone()),
                    teacher: "Ошибка в расписании".to_string(),
                });
            }
        }
        // No cabinets at all: mark every subgroup with the "??" placeholder.
        else {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some("??".to_string());
            }
        }

        cabinets
    };

    let lesson = Lesson {
        lesson_type,
        default_range,
        name: Some(name),
        time: lesson_time,
        subgroups: Some(subgroups),
        group: None,
    };

    // First lesson of the day: no break needed before it.
    let prev_lesson = if day.lessons.len() == 0 {
        return Ok(Lessons(Vec::from([lesson])));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };

    // Otherwise prepend a Break covering the gap since the previous lesson.
    Ok(Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            default_range: None,
            name: None,
            time: LessonTime {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ])))
}
|
|
||||||
|
|
||||||
/// Obtaining a list of cabinets to the right of the lesson cell.
|
|
||||||
fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
|
|
||||||
let mut cabinets: Vec<String> = Vec::new();
|
|
||||||
|
|
||||||
if let Some(raw) = get_string_from_cell(&worksheet, row, column) {
|
|
||||||
let clean = raw.replace("\n", " ");
|
|
||||||
let parts: Vec<&str> = clean.split(" ").collect();
|
|
||||||
|
|
||||||
for part in parts {
|
|
||||||
let clean_part = part.to_string().trim().to_string();
|
|
||||||
|
|
||||||
cabinets.push(clean_part);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
cabinets
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
///
/// After stripping whitespace/punctuation a cell looks like
/// `"SubjectNameСидоровАБ(1подгруппа)ПетроваВГ"`: a trailing run of teacher
/// blocks (surname + two initials + optional subgroup digit in parentheses).
/// Returns the subject name and one `LessonSubGroup` per teacher; missing
/// subgroup numbers are then normalized to 1/2 (see fix-up below).
fn parse_name_and_subgroups(name: &String) -> Result<(String, Vec<LessonSubGroup>), ParseError> {
    // Trailing run of teacher blocks, anchored at the end of the string.
    static LESSON_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(?:[А-Я][а-я]+[А-Я]{2}(?:\([0-9][а-я]+\))?)+$").unwrap());
    // One teacher block: (surname)(initial)(initial), optional (digit…).
    static TEACHER_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"([А-Я][а-я]+)([А-Я])([А-Я])(?:\(([0-9])[а-я]+\))?").unwrap());
    // Whitespace and separators removed before matching.
    static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s.,]+").unwrap());
    // Trailing dots/spaces stripped from the final pieces.
    static END_CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[.\s]+$").unwrap());

    let (teachers, lesson_name) = {
        // Collapse the cell so the teacher blocks become contiguous.
        let clean_name = CLEAN_RE.replace_all(&name, "").to_string();

        if let Some(captures) = LESSON_RE.captures(&clean_name) {
            let capture = captures.get(0).unwrap();
            let capture_str = capture.as_str().to_string();
            // First 5 chars of the teacher run — used to locate where the
            // teachers start in the ORIGINAL (uncleaned) string so the
            // subject name can be split off.
            // NOTE(review): `find(...).unwrap()` below panics if punctuation
            // inside those 5 chars prevented a verbatim match — confirm
            // real-world cells cannot trigger this.
            let capture_name: String = capture_str.chars().take(5).collect();

            (
                END_CLEAN_RE.replace(&capture_str, "").to_string(),
                END_CLEAN_RE
                    .replace(&name[0..name.find(&*capture_name).unwrap()], "")
                    .to_string(),
            )
        } else {
            // No teachers found: the whole cell text is the subject name.
            return Ok((END_CLEAN_RE.replace(&name, "").to_string(), Vec::new()));
        }
    };

    let mut subgroups: Vec<LessonSubGroup> = Vec::new();

    let teacher_it = TEACHER_RE.captures_iter(&teachers);

    for captures in teacher_it {
        subgroups.push(LessonSubGroup {
            // Explicit subgroup digit; 0 means "not specified" until fix-up.
            number: match captures.get(4) {
                Some(capture) => capture.as_str().to_string().parse::<u8>().unwrap(),
                None => 0,
            },
            cabinet: None,
            // "Фамилия И.О." format.
            teacher: format!(
                "{} {}.{}.",
                captures.get(1).unwrap().as_str().to_string(),
                captures.get(2).unwrap().as_str().to_string(),
                captures.get(3).unwrap().as_str().to_string()
            ),
        });
    }

    // Fix-up for teachers whose subgroup index is missing.

    if subgroups.len() == 1 {
        let index = subgroups[0].number;

        if index == 0 {
            // A single teacher without an index teaches the whole group.
            subgroups[0].number = 1u8;
        } else {
            // An explicit index implies another subgroup exists somewhere
            // else — add a placeholder for it.
            subgroups.push(LessonSubGroup {
                number: if index == 1 { 2 } else { 1 },
                cabinet: None,
                teacher: "Только у другой".to_string(),
            });
        }
    } else if subgroups.len() == 2 {
        // Both indexes missing: assign 1 and 2 in order of appearance.
        if subgroups[0].number == 0 && subgroups[1].number == 0 {
            subgroups[0].number = 1;
            subgroups[1].number = 2;
        }
        // Only the first is missing: take the complement of the second.
        else if subgroups[0].number == 0 {
            subgroups[0].number = if subgroups[1].number == 1 { 2 } else { 1 };
        }
        // Only the second is missing: take the complement of the first.
        else if subgroups[1].number == 0 {
            subgroups[1].number = if subgroups[0].number == 1 { 2 } else { 1 };
        }
    }

    // Keep subgroups ordered by number.
    if subgroups.len() == 2 && subgroups[0].number == 2 && subgroups[1].number == 1 {
        subgroups.reverse()
    }

    Ok((lesson_name, subgroups))
}
|
|
||||||
|
|
||||||
/// Conversion of the list of couples of groups in the list of lessons of teachers.
|
|
||||||
fn convert_groups_to_teachers(
|
|
||||||
groups: &HashMap<String, ScheduleEntry>,
|
|
||||||
) -> HashMap<String, ScheduleEntry> {
|
|
||||||
let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();
|
|
||||||
|
|
||||||
let empty_days: Vec<Day> = groups
|
|
||||||
.values()
|
|
||||||
.next()
|
|
||||||
.unwrap()
|
|
||||||
.days
|
|
||||||
.iter()
|
|
||||||
.map(|day| Day {
|
|
||||||
name: day.name.clone(),
|
|
||||||
street: day.street.clone(),
|
|
||||||
date: day.date.clone(),
|
|
||||||
lessons: vec![],
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
for group in groups.values() {
|
|
||||||
for (index, day) in group.days.iter().enumerate() {
|
|
||||||
for group_lesson in &day.lessons {
|
|
||||||
if group_lesson.lesson_type == Break {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if group_lesson.subgroups.is_none() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let subgroups = group_lesson.subgroups.as_ref().unwrap();
|
|
||||||
|
|
||||||
for subgroup in subgroups {
|
|
||||||
if subgroup.teacher == "Ошибка в расписании" {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !teachers.contains_key(&subgroup.teacher) {
|
|
||||||
teachers.insert(
|
|
||||||
subgroup.teacher.clone(),
|
|
||||||
ScheduleEntry {
|
|
||||||
name: subgroup.teacher.clone(),
|
|
||||||
days: empty_days.to_vec(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let teacher_day = teachers
|
|
||||||
.get_mut(&subgroup.teacher)
|
|
||||||
.unwrap()
|
|
||||||
.days
|
|
||||||
.get_mut(index)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
teacher_day.lessons.push({
|
|
||||||
let mut lesson = group_lesson.clone();
|
|
||||||
lesson.group = Some(group.name.clone());
|
|
||||||
|
|
||||||
lesson
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
teachers.iter_mut().for_each(|(_, teacher)| {
|
|
||||||
teacher.days.iter_mut().for_each(|day| {
|
|
||||||
day.lessons.sort_by(|a, b| {
|
|
||||||
a.default_range.as_ref().unwrap()[1].cmp(&b.default_range.as_ref().unwrap()[1])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
teachers
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reading XLS Document from the buffer and converting it into the schedule ready to use.
///
/// # Arguments
///
/// * `buffer`: XLS data containing schedule.
///
/// returns: Result<ParseResult, ParseError>
///
/// # Examples
///
/// ```
/// use schedule_parser_rusted::parser::parse_xls;
///
/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
///
/// assert!(result.is_ok());
///
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
    let cursor = Cursor::new(&buffer);
    let mut workbook: Xls<_> =
        open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;

    // Only the first sheet is used.
    let worksheet: WorkSheet = workbook
        .worksheets()
        .first()
        .ok_or(ParseError::NoWorkSheets)?
        .1
        .to_owned();

    // Cell coordinates of day headers and group headers.
    let (days_markup, groups_markup) = parse_skeleton(&worksheet)?;

    let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
    // Per-day time slots; filled while processing the FIRST group and reused
    // for every subsequent group (the time column is shared).
    let mut days_times: Vec<Vec<InternalTime>> = Vec::new();

    let saturday_end_row = worksheet.end().unwrap().0;

    for group_markup in groups_markup {
        let mut group = ScheduleEntry {
            name: group_markup.name,
            days: Vec::new(),
        };

        for day_index in 0..(&days_markup).len() {
            let day_markup = &days_markup[day_index];

            // Split "Name DD.MM.YYYY" into the weekday name and a UTC date.
            let mut day = {
                let space_index = day_markup.name.find(' ').unwrap();

                let name = day_markup.name[..space_index].to_string();

                let date_raw = day_markup.name[space_index + 1..].to_string();
                let date_add = format!("{} 00:00:00", date_raw);

                let date = NaiveDateTime::parse_from_str(&*date_add, "%d.%m.%Y %H:%M:%S");

                Day {
                    name,
                    street: None,
                    date: date.unwrap().and_utc(),
                    lessons: Vec::new(),
                }
            };

            // Time strings live one column right of the day-header column.
            let lesson_time_column = days_markup[0].column + 1;

            // Number of sheet rows this day occupies (until the next day
            // header, or the sheet end for the last day).
            let row_distance = if day_index != days_markup.len() - 1 {
                days_markup[day_index + 1].row
            } else {
                saturday_end_row
            } - day_markup.row;

            // Build the slot table only until all 6 days are cached.
            if days_times.len() != 6 {
                let mut day_times: Vec<InternalTime> = Vec::new();

                for row in day_markup.row..(day_markup.row + row_distance) {
                    // time
                    let time_opt = get_string_from_cell(&worksheet, row, lesson_time_column);
                    if time_opt.is_none() {
                        continue;
                    }

                    let time = time_opt.unwrap();

                    // type: "пара" marks a regular slot, anything else is
                    // an additional class.
                    let lesson_type = if time.contains("пара") {
                        LessonType::Default
                    } else {
                        LessonType::Additional
                    };

                    // lesson index: first character of e.g. "1 пара 8.30-10.00"
                    let default_index = if lesson_type == LessonType::Default {
                        Some(
                            time.chars()
                                .next()
                                .unwrap()
                                .to_string()
                                .parse::<u32>()
                                .unwrap(),
                        )
                    } else {
                        None
                    };

                    // time range "H.MM-H.MM" → UTC start/end on this date
                    let time_range = {
                        static TIME_RE: LazyLock<Regex> =
                            LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());

                        let parse_res = TIME_RE.captures(&time).ok_or(ParseError::GlobalTime(
                            ErrorCell::new(row, lesson_time_column, time.clone()),
                        ))?;

                        let start_match = parse_res.get(1).unwrap().as_str();
                        let start_parts: Vec<&str> = start_match.split(".").collect();

                        let end_match = parse_res.get(2).unwrap().as_str();
                        let end_parts: Vec<&str> = end_match.split(".").collect();

                        // NOTE(review): the `- 4` presumably converts local
                        // campus time (UTC+4) to UTC — confirm.
                        static GET_TIME: fn(DateTime<Utc>, &Vec<&str>) -> DateTime<Utc> =
                            |date, parts| {
                                date + Duration::hours(parts[0].parse::<i64>().unwrap() - 4)
                                    + Duration::minutes(parts[1].parse::<i64>().unwrap())
                            };

                        LessonTime {
                            start: GET_TIME(day.date.clone(), &start_parts),
                            end: GET_TIME(day.date.clone(), &end_parts),
                        }
                    };

                    day_times.push(InternalTime {
                        time_range,
                        lesson_type,
                        default_index,
                        xls_range: get_merge_from_start(&worksheet, row, lesson_time_column),
                    });
                }

                days_times.push(day_times);
            }

            let day_times = &days_times[day_index];

            // Parse every slot of this day for the current group's column.
            for time in day_times {
                match &mut parse_lesson(
                    &worksheet,
                    &mut day,
                    &day_times,
                    &time,
                    group_markup.column,
                )? {
                    Lessons(l) => day.lessons.append(l),
                    Street(s) => day.street = Some(s.to_owned()),
                }
            }

            group.days.push(day);
        }

        groups.insert(group.name.clone(), group);
    }

    Ok(ParseResult {
        teachers: convert_groups_to_teachers(&groups),
        groups,
    })
}
|
|
||||||
|
|
||||||
#[cfg(test)]
pub mod tests {
    use super::*;

    /// Parses the bundled sample workbook; shared by other test modules.
    pub fn test_result() -> Result<ParseResult, ParseError> {
        parse_xls(&include_bytes!("../../schedule.xls").to_vec())
    }

    #[test]
    fn read() {
        let result = test_result();

        assert!(result.is_ok());

        let parsed = result.as_ref().unwrap();
        assert_ne!(parsed.groups.len(), 0);
        assert_ne!(parsed.teachers.len(), 0);
    }
}
|
|
||||||
@@ -1,180 +0,0 @@
|
|||||||
use chrono::{DateTime, Utc};
|
|
||||||
use derive_more::{Display, Error};
|
|
||||||
use serde::{Deserialize, Serialize, Serializer};
|
|
||||||
use serde_repr::{Deserialize_repr, Serialize_repr};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use utoipa::ToSchema;
|
|
||||||
|
|
||||||
/// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonTime {
    /// The beginning of a lesson (UTC).
    pub start: DateTime<Utc>,

    /// The end of the lesson (UTC).
    pub end: DateTime<Utc>,
}
|
|
||||||
|
|
||||||
/// Type of lesson. Serialized as its numeric discriminant (`repr(u8)`).
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
    /// Regular lesson ("пара").
    Default = 0,

    /// Additional classes.
    Additional,

    /// Break between lessons.
    Break,

    /// Consultation.
    Consultation,

    /// Independent work.
    IndependentWork,

    /// Pass/fail test ("зачёт").
    Exam,

    /// Graded test ("зачёт с оценкой").
    ExamWithGrade,

    /// Exam ("экзамен").
    ExamDefault,
}
|
|
||||||
|
|
||||||
/// One subgroup of a lesson: a teacher plus an optional cabinet.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
    /// Index of subgroup (1-based; 0 is used transiently by the parser for
    /// "not specified" before normalization).
    pub number: u8,

    /// Cabinet, if present.
    pub cabinet: Option<String>,

    /// Full name of the teacher ("Фамилия И.О." as produced by the parser).
    pub teacher: String,
}
|
|
||||||
|
|
||||||
/// A single schedule slot: a lesson or a break.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
    /// Type.
    #[serde(rename = "type")]
    pub lesson_type: LessonType,

    /// Pair-index range `[first, last]`; `None` for breaks and additional
    /// classes.
    pub default_range: Option<[u8; 2]>,

    /// Name; `None` for breaks.
    pub name: Option<String>,

    /// The beginning and end.
    pub time: LessonTime,

    /// List of subgroups.
    #[serde(rename = "subGroups")]
    pub subgroups: Option<Vec<LessonSubGroup>>,

    /// Group name, if this is a schedule for teachers.
    pub group: Option<String>,
}
|
|
||||||
|
|
||||||
/// One day of a schedule entry.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
    /// Day of the week.
    pub name: String,

    /// Address of another corps, when the day is held there.
    pub street: Option<String>,

    /// Date (midnight, UTC).
    pub date: DateTime<Utc>,

    /// List of lessons on this day.
    pub lessons: Vec<Lesson>,
}
|
|
||||||
|
|
||||||
/// Schedule of a single group or a single teacher.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
    /// The name of the group or name of the teacher.
    pub name: String,

    /// List of six days (Monday through Saturday).
    pub days: Vec<Day>,
}
|
|
||||||
|
|
||||||
/// Full output of `parse_xls`: both views of the same schedule.
#[derive(Clone)]
pub struct ParseResult {
    /// List of groups, keyed by group name.
    pub groups: HashMap<String, ScheduleEntry>,

    /// List of teachers, keyed by teacher name (derived from `groups`).
    pub teachers: HashMap<String, ScheduleEntry>,
}
|
|
||||||
|
|
||||||
/// Zero-based sheet coordinates of a cell referenced in an error.
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("row {row}, column {column}")]
pub struct ErrorCellPos {
    pub row: u32,
    pub column: u32,
}
|
|
||||||
|
|
||||||
/// A cell position together with the raw text that failed to parse.
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("'{data}' at {pos}")]
pub struct ErrorCell {
    pub pos: ErrorCellPos,
    pub data: String,
}
|
|
||||||
|
|
||||||
impl ErrorCell {
|
|
||||||
pub fn new(row: u32, column: u32, data: String) -> Self {
|
|
||||||
Self {
|
|
||||||
pos: ErrorCellPos { row, column },
|
|
||||||
data,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Everything that can go wrong while parsing a schedule workbook.
#[derive(Clone, Debug, Display, Error, ToSchema)]
pub enum ParseError {
    /// Errors related to reading XLS file.
    #[display("{_0:?}: Failed to read XLS file.")]
    #[schema(value_type = String)]
    BadXLS(Arc<calamine::XlsError>),

    /// Not a single sheet was found.
    #[display("No work sheets found.")]
    NoWorkSheets,

    /// There are no data on the boundaries of the sheet.
    #[display("There is no data on work sheet boundaries.")]
    UnknownWorkSheetRange,

    /// Failed to read the beginning and end of the lesson from the time cell.
    #[display("Failed to read lesson start and end times from {_0}.")]
    GlobalTime(ErrorCell),

    /// No time slot matching the lesson's merged cell range was found.
    #[display("No start and end times matching the lesson (at {_0}) was found.")]
    LessonTimeNotFound(ErrorCellPos),
}
|
|
||||||
|
|
||||||
impl Serialize for ParseError {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: Serializer,
|
|
||||||
{
|
|
||||||
match self {
|
|
||||||
ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
|
|
||||||
ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
|
|
||||||
ParseError::UnknownWorkSheetRange => {
|
|
||||||
serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
|
|
||||||
}
|
|
||||||
ParseError::GlobalTime(_) => serializer.serialize_str("GLOBAL_TIME"),
|
|
||||||
ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1
src/routes/admin/mod.rs
Normal file
1
src/routes/admin/mod.rs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
pub mod service_users;
|
||||||
75
src/routes/admin/service_users/create.rs
Normal file
75
src/routes/admin/service_users/create.rs
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
use self::schema::*;
|
||||||
|
use crate::{utility, AppState};
|
||||||
|
use actix_web::{post, web};
|
||||||
|
use database::entity::{ActiveServiceUser, UserType};
|
||||||
|
use database::query::Query;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use objectid::ObjectId;
|
||||||
|
use web::Json;
|
||||||
|
|
||||||
|
/// Creates a new service user and returns an access token for it.
///
/// Responds with `ErrorCode::AlreadyExists` when a service user with the
/// requested name is already registered.
#[utoipa::path(responses(
    (status = OK, body = Response),
))]
#[post("/create")]
pub async fn create(data_json: Json<Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    // NOTE(review): database failures panic via `expect` instead of mapping
    // to a 5xx response — confirm this is acceptable for this admin route.
    // NOTE(review): the lookup helper is named `find_service_user_by_id` but
    // receives the NAME — confirm the helper actually searches by name.
    let service_user =
        match Query::find_service_user_by_id(app_state.get_database(), &data_json.name)
            .await
            .expect("Failed to find service user by name")
        {
            Some(_) => return Err(ErrorCode::AlreadyExists).into(),
            None => {
                // Fresh ObjectId string as the primary key.
                let new_user = ActiveServiceUser {
                    id: Set(ObjectId::new().unwrap().to_string()),
                    name: Set(data_json.name.clone()),
                };

                new_user
                    .insert(app_state.get_database())
                    .await
                    .expect("Failed to insert service user")
            }
        };

    // Issue a service-scoped JWT for the (new) user.
    let access_token = utility::jwt::encode(UserType::Service, &service_user.id);
    Ok(Response::new(access_token)).into()
}
|
||||||
|
|
||||||
|
/// Request/response schema for the service-user `create` endpoint.
mod schema {
    use actix_macros::{ErrResponse, OkResponse};
    use derive_more::Display;
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Request body for `POST /create`.
    #[derive(Debug, Deserialize, Serialize, ToSchema)]
    #[serde(rename_all = "camelCase")]
    #[schema(as = ServiceUser::Create::Request)]
    pub struct Request {
        /// Service username.
        pub name: String,
    }

    /// Successful response: access token issued for the new service user.
    #[derive(Serialize, ToSchema, OkResponse)]
    #[serde(rename_all = "camelCase")]
    #[schema(as = ServiceUser::Create::Response)]
    pub struct Response {
        // JWT issued for the created service user.
        access_token: String,
    }

    impl Response {
        /// Wraps an issued token into the response schema.
        pub fn new(access_token: String) -> Self {
            Self { access_token }
        }
    }

    /// Combined OK/error response type used by the handler.
    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    /// Error codes returned by the `create` endpoint.
    // NOTE(review): AlreadyExists is reported with 401 UNAUTHORIZED —
    // confirm 409 CONFLICT was not intended.
    #[derive(Clone, ToSchema, Display, ErrResponse, Serialize)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
    #[schema(as = ServiceUser::Create::ErrorCode)]
    pub enum ErrorCode {
        /// A service user with the requested name is already registered.
        #[display("Service user with that name already exists.")]
        AlreadyExists,
    }
}
|
||||||
3
src/routes/admin/service_users/mod.rs
Normal file
3
src/routes/admin/service_users/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
mod create;
|
||||||
|
|
||||||
|
pub use create::*;
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
|
mod shared;
|
||||||
mod sign_in;
|
mod sign_in;
|
||||||
mod sign_up;
|
mod sign_up;
|
||||||
mod shared;
|
|
||||||
|
|
||||||
pub use sign_in::*;
|
pub use sign_in::*;
|
||||||
pub use sign_up::*;
|
pub use sign_up::*;
|
||||||
|
|||||||
@@ -1,23 +1,10 @@
|
|||||||
use crate::utility::jwt::DEFAULT_ALGORITHM;
|
|
||||||
use jsonwebtoken::errors::ErrorKind;
|
use jsonwebtoken::errors::ErrorKind;
|
||||||
use jsonwebtoken::{decode, DecodingKey, Validation};
|
use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::env;
|
|
||||||
use std::sync::LazyLock;
|
|
||||||
|
|
||||||
#[derive(Deserialize, Serialize)]
|
|
||||||
struct TokenData {
|
|
||||||
iis: String,
|
|
||||||
sub: i32,
|
|
||||||
app: i32,
|
|
||||||
exp: i32,
|
|
||||||
iat: i32,
|
|
||||||
jti: i32,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
struct Claims {
|
struct Claims {
|
||||||
sub: String,
|
sub: i32,
|
||||||
iis: String,
|
iis: String,
|
||||||
jti: i32,
|
jti: i32,
|
||||||
app: i32,
|
app: i32,
|
||||||
@@ -25,7 +12,7 @@ struct Claims {
|
|||||||
|
|
||||||
#[derive(Debug, PartialEq)]
|
#[derive(Debug, PartialEq)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
JwtError(ErrorKind),
|
Jwt(ErrorKind),
|
||||||
InvalidSignature,
|
InvalidSignature,
|
||||||
InvalidToken,
|
InvalidToken,
|
||||||
Expired,
|
Expired,
|
||||||
@@ -52,17 +39,10 @@ const VK_PUBLIC_KEY: &str = concat!(
|
|||||||
"-----END PUBLIC KEY-----"
|
"-----END PUBLIC KEY-----"
|
||||||
);
|
);
|
||||||
|
|
||||||
static VK_ID_CLIENT_ID: LazyLock<i32> = LazyLock::new(|| {
|
pub fn parse_vk_id(token_str: &str, client_id: i32) -> Result<i32, Error> {
|
||||||
env::var("VK_ID_CLIENT_ID")
|
|
||||||
.expect("VK_ID_CLIENT_ID must be set")
|
|
||||||
.parse::<i32>()
|
|
||||||
.expect("VK_ID_CLIENT_ID must be i32")
|
|
||||||
});
|
|
||||||
|
|
||||||
pub fn parse_vk_id(token_str: &String) -> Result<i32, Error> {
|
|
||||||
let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();
|
let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();
|
||||||
|
|
||||||
match decode::<Claims>(&token_str, &dkey, &Validation::new(DEFAULT_ALGORITHM)) {
|
match decode::<Claims>(token_str, &dkey, &Validation::new(Algorithm::RS256)) {
|
||||||
Ok(token_data) => {
|
Ok(token_data) => {
|
||||||
let claims = token_data.claims;
|
let claims = token_data.claims;
|
||||||
|
|
||||||
@@ -70,13 +50,10 @@ pub fn parse_vk_id(token_str: &String) -> Result<i32, Error> {
|
|||||||
Err(Error::UnknownIssuer(claims.iis))
|
Err(Error::UnknownIssuer(claims.iis))
|
||||||
} else if claims.jti != 21 {
|
} else if claims.jti != 21 {
|
||||||
Err(Error::UnknownType(claims.jti))
|
Err(Error::UnknownType(claims.jti))
|
||||||
} else if claims.app != *VK_ID_CLIENT_ID {
|
} else if claims.app != client_id {
|
||||||
Err(Error::UnknownClientId(claims.app))
|
Err(Error::UnknownClientId(claims.app))
|
||||||
} else {
|
} else {
|
||||||
match claims.sub.parse::<i32>() {
|
Ok(claims.sub)
|
||||||
Ok(sub) => Ok(sub),
|
|
||||||
Err(_) => Err(Error::InvalidToken),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(err) => Err(match err.into_kind() {
|
Err(err) => Err(match err.into_kind() {
|
||||||
@@ -90,7 +67,7 @@ pub fn parse_vk_id(token_str: &String) -> Result<i32, Error> {
|
|||||||
ErrorKind::Base64(_) => Error::InvalidToken,
|
ErrorKind::Base64(_) => Error::InvalidToken,
|
||||||
ErrorKind::Json(_) => Error::InvalidToken,
|
ErrorKind::Json(_) => Error::InvalidToken,
|
||||||
ErrorKind::Utf8(_) => Error::InvalidToken,
|
ErrorKind::Utf8(_) => Error::InvalidToken,
|
||||||
kind => Error::JwtError(kind),
|
kind => Error::Jwt(kind),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,29 +1,35 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::database::driver;
|
|
||||||
use crate::database::models::User;
|
|
||||||
use crate::routes::auth::shared::parse_vk_id;
|
use crate::routes::auth::shared::parse_vk_id;
|
||||||
use crate::routes::auth::sign_in::schema::SignInData::{Default, Vk};
|
use crate::routes::auth::sign_in::schema::SignInData::{Default, VkOAuth};
|
||||||
use crate::routes::schema::user::UserResponse;
|
use crate::routes::schema::user::UserResponse;
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
use crate::routes::schema::ResponseError;
|
||||||
use crate::utility::mutex::MutexScope;
|
use crate::{utility, AppState};
|
||||||
use crate::{AppState, utility};
|
|
||||||
use actix_web::{post, web};
|
use actix_web::{post, web};
|
||||||
use diesel::SaveChangesDsl;
|
use database::query::Query;
|
||||||
use web::Json;
|
use web::Json;
|
||||||
|
use database::entity::UserType;
|
||||||
|
|
||||||
async fn sign_in_combined(
|
async fn sign_in_combined(
|
||||||
data: SignInData,
|
data: SignInData,
|
||||||
app_state: &web::Data<AppState>,
|
app_state: &web::Data<AppState>,
|
||||||
) -> Result<UserResponse, ErrorCode> {
|
) -> Result<UserResponse, ErrorCode> {
|
||||||
|
let db = app_state.get_database();
|
||||||
|
|
||||||
let user = match &data {
|
let user = match &data {
|
||||||
Default(data) => driver::users::get_by_username(&app_state, &data.username),
|
Default(data) => Query::find_user_by_username(db, &data.username).await,
|
||||||
Vk(id) => driver::users::get_by_vk_id(&app_state, *id),
|
VkOAuth(id) => Query::find_user_by_vk_id(db, *id).await,
|
||||||
};
|
}
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
|
||||||
match user {
|
match user {
|
||||||
Ok(mut user) => {
|
Some(user) => {
|
||||||
if let Default(data) = data {
|
if let Default(data) = data {
|
||||||
match bcrypt::verify(&data.password, &user.password) {
|
if user.password.is_none() {
|
||||||
|
return Err(ErrorCode::IncorrectCredentials);
|
||||||
|
}
|
||||||
|
|
||||||
|
match bcrypt::verify(&data.password, user.password.as_ref().unwrap()) {
|
||||||
Ok(result) => {
|
Ok(result) => {
|
||||||
if !result {
|
if !result {
|
||||||
return Err(ErrorCode::IncorrectCredentials);
|
return Err(ErrorCode::IncorrectCredentials);
|
||||||
@@ -35,17 +41,11 @@ async fn sign_in_combined(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
user.access_token = utility::jwt::encode(&user.id);
|
let access_token = utility::jwt::encode(UserType::Default, &user.id);
|
||||||
|
Ok(UserResponse::from_user_with_token(user, access_token))
|
||||||
app_state.database.scope(|conn| {
|
|
||||||
user.save_changes::<User>(conn)
|
|
||||||
.expect("Failed to update user")
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(user.into())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(_) => Err(ErrorCode::IncorrectCredentials),
|
None => Err(ErrorCode::IncorrectCredentials),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -71,15 +71,17 @@ pub async fn sign_in_vk(
|
|||||||
) -> ServiceResponse {
|
) -> ServiceResponse {
|
||||||
let data = data_json.into_inner();
|
let data = data_json.into_inner();
|
||||||
|
|
||||||
match parse_vk_id(&data.access_token) {
|
match parse_vk_id(&data.access_token, app_state.get_env().vk_id.client_id) {
|
||||||
Ok(id) => sign_in_combined(Vk(id), &app_state).await.into(),
|
Ok(id) => sign_in_combined(VkOAuth(id), &app_state).await,
|
||||||
Err(_) => ErrorCode::InvalidVkAccessToken.into_response(),
|
Err(_) => Err(ErrorCode::InvalidVkAccessToken),
|
||||||
}
|
}
|
||||||
|
.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::routes::schema::user::UserResponse;
|
use crate::routes::schema::user::UserResponse;
|
||||||
use actix_macros::{IntoResponseError, StatusCode};
|
use actix_macros::ErrResponse;
|
||||||
|
use derive_more::Display;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
@@ -109,43 +111,43 @@ mod schema {
|
|||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
|
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema, Clone, IntoResponseError, StatusCode)]
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
|
||||||
#[schema(as = SignIn::ErrorCode)]
|
#[schema(as = SignIn::ErrorCode)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
|
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// Incorrect username or password.
|
/// Incorrect username or password.
|
||||||
|
#[display("Incorrect username or password.")]
|
||||||
IncorrectCredentials,
|
IncorrectCredentials,
|
||||||
|
|
||||||
/// Invalid VK ID token.
|
/// Invalid VK ID token.
|
||||||
|
#[display("Invalid VK ID token.")]
|
||||||
InvalidVkAccessToken,
|
InvalidVkAccessToken,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Internal
|
|
||||||
|
|
||||||
/// Type of authorization.
|
/// Type of authorization.
|
||||||
pub enum SignInData {
|
pub enum SignInData {
|
||||||
/// User and password name and password.
|
/// User and password name and password.
|
||||||
Default(Request),
|
Default(Request),
|
||||||
|
|
||||||
/// Identifier of the attached account VK.
|
/// Identifier of the attached account VK.
|
||||||
Vk(i32),
|
VkOAuth(i32),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::schema::*;
|
use super::schema::*;
|
||||||
use crate::database::driver;
|
|
||||||
use crate::database::models::{User, UserRole};
|
|
||||||
use crate::routes::auth::sign_in::sign_in;
|
use crate::routes::auth::sign_in::sign_in;
|
||||||
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
|
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
|
||||||
use crate::utility;
|
|
||||||
use actix_test::test_app;
|
use actix_test::test_app;
|
||||||
use actix_web::dev::ServiceResponse;
|
use actix_web::dev::ServiceResponse;
|
||||||
use actix_web::http::Method;
|
use actix_web::http::Method;
|
||||||
use actix_web::http::StatusCode;
|
use actix_web::http::StatusCode;
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::ActiveUser;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, Set};
|
||||||
use sha1::{Digest, Sha1};
|
use sha1::{Digest, Sha1};
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
|
|
||||||
@@ -179,20 +181,22 @@ mod tests {
|
|||||||
test_env();
|
test_env();
|
||||||
|
|
||||||
let app_state = static_app_state().await;
|
let app_state = static_app_state().await;
|
||||||
driver::users::insert_or_ignore(
|
|
||||||
&app_state,
|
let active_user = ActiveUser {
|
||||||
&User {
|
id: Set(id.clone()),
|
||||||
id: id.clone(),
|
username: Set(username),
|
||||||
username,
|
password: Set(Some(bcrypt::hash("example", bcrypt::DEFAULT_COST).unwrap())),
|
||||||
password: bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap(),
|
vk_id: Set(None),
|
||||||
vk_id: None,
|
telegram_id: Set(None),
|
||||||
access_token: utility::jwt::encode(&id),
|
group: Set(Some("ИС-214/23".to_string())),
|
||||||
group: "ИС-214/23".to_string(),
|
role: Set(UserRole::Student),
|
||||||
role: UserRole::Student,
|
android_version: Set(None),
|
||||||
version: "1.0.0".to_string(),
|
};
|
||||||
},
|
|
||||||
)
|
active_user
|
||||||
.unwrap();
|
.save(app_state.get_database())
|
||||||
|
.await
|
||||||
|
.expect("Failed to save user");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_web::test]
|
#[actix_web::test]
|
||||||
|
|||||||
@@ -1,12 +1,13 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::AppState;
|
use crate::routes::auth::shared::parse_vk_id;
|
||||||
use crate::database::driver;
|
|
||||||
use crate::database::models::UserRole;
|
|
||||||
use crate::routes::auth::shared::{Error, parse_vk_id};
|
|
||||||
use crate::routes::schema::user::UserResponse;
|
use crate::routes::schema::user::UserResponse;
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
use crate::routes::schema::ResponseError;
|
||||||
|
use crate::{utility, AppState};
|
||||||
use actix_web::{post, web};
|
use actix_web::{post, web};
|
||||||
use rand::{Rng, rng};
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::{ActiveUser, UserType};
|
||||||
|
use database::query::Query;
|
||||||
|
use database::sea_orm::ActiveModelTrait;
|
||||||
use web::Json;
|
use web::Json;
|
||||||
|
|
||||||
async fn sign_up_combined(
|
async fn sign_up_combined(
|
||||||
@@ -18,31 +19,41 @@ async fn sign_up_combined(
|
|||||||
return Err(ErrorCode::DisallowedRole);
|
return Err(ErrorCode::DisallowedRole);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If specified group doesn't exist in schedule.
|
if !app_state
|
||||||
let schedule_opt = app_state.schedule.lock().unwrap();
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
if let Some(schedule) = &*schedule_opt {
|
.unwrap()
|
||||||
if !schedule.data.groups.contains_key(&data.group) {
|
.data
|
||||||
|
.groups
|
||||||
|
.contains_key(&data.group)
|
||||||
|
{
|
||||||
return Err(ErrorCode::InvalidGroupName);
|
return Err(ErrorCode::InvalidGroupName);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// If user with specified username already exists.
|
let db = app_state.get_database();
|
||||||
if driver::users::contains_by_username(&app_state, &data.username) {
|
|
||||||
|
// If user with specified username already exists.O
|
||||||
|
if Query::find_user_by_username(db, &data.username)
|
||||||
|
.await
|
||||||
|
.is_ok_and(|user| user.is_some())
|
||||||
|
{
|
||||||
return Err(ErrorCode::UsernameAlreadyExists);
|
return Err(ErrorCode::UsernameAlreadyExists);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If user with specified VKID already exists.
|
// If user with specified VKID already exists.
|
||||||
if let Some(id) = data.vk_id {
|
if let Some(id) = data.vk_id
|
||||||
if driver::users::contains_by_vk_id(&app_state, id) {
|
&& Query::is_user_exists_by_vk_id(db, id)
|
||||||
|
.await
|
||||||
|
.expect("Failed to check user existence")
|
||||||
|
{
|
||||||
return Err(ErrorCode::VkAlreadyExists);
|
return Err(ErrorCode::VkAlreadyExists);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
let user = data.into();
|
let active_user: ActiveUser = data.into();
|
||||||
driver::users::insert(&app_state, &user).unwrap();
|
let user = active_user.insert(db).await.unwrap();
|
||||||
|
let access_token = utility::jwt::encode(UserType::Default, &user.id);
|
||||||
|
|
||||||
Ok(UserResponse::from(&user)).into()
|
Ok(UserResponse::from_user_with_token(user, access_token))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses(
|
||||||
@@ -56,7 +67,7 @@ pub async fn sign_up(data_json: Json<Request>, app_state: web::Data<AppState>) -
|
|||||||
sign_up_combined(
|
sign_up_combined(
|
||||||
SignUpData {
|
SignUpData {
|
||||||
username: data.username,
|
username: data.username,
|
||||||
password: data.password,
|
password: Some(data.password),
|
||||||
vk_id: None,
|
vk_id: None,
|
||||||
group: data.group,
|
group: data.group,
|
||||||
role: data.role,
|
role: data.role,
|
||||||
@@ -79,15 +90,12 @@ pub async fn sign_up_vk(
|
|||||||
) -> ServiceResponse {
|
) -> ServiceResponse {
|
||||||
let data = data_json.into_inner();
|
let data = data_json.into_inner();
|
||||||
|
|
||||||
match parse_vk_id(&data.access_token) {
|
match parse_vk_id(&data.access_token, app_state.get_env().vk_id.client_id) {
|
||||||
Ok(id) => sign_up_combined(
|
Ok(id) => {
|
||||||
|
sign_up_combined(
|
||||||
SignUpData {
|
SignUpData {
|
||||||
username: data.username,
|
username: data.username,
|
||||||
password: rng()
|
password: None,
|
||||||
.sample_iter(&rand::distr::Alphanumeric)
|
|
||||||
.take(16)
|
|
||||||
.map(char::from)
|
|
||||||
.collect(),
|
|
||||||
vk_id: Some(id),
|
vk_id: Some(id),
|
||||||
group: data.group,
|
group: data.group,
|
||||||
role: data.role,
|
role: data.role,
|
||||||
@@ -96,23 +104,19 @@ pub async fn sign_up_vk(
|
|||||||
&app_state,
|
&app_state,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.into(),
|
|
||||||
Err(err) => {
|
|
||||||
if err != Error::Expired {
|
|
||||||
eprintln!("Failed to parse vk id token!");
|
|
||||||
eprintln!("{:?}", err);
|
|
||||||
}
|
|
||||||
|
|
||||||
ErrorCode::InvalidVkAccessToken.into_response()
|
|
||||||
}
|
}
|
||||||
|
Err(_) => Err(ErrorCode::InvalidVkAccessToken),
|
||||||
}
|
}
|
||||||
|
.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::database::models::{User, UserRole};
|
|
||||||
use crate::routes::schema::user::UserResponse;
|
use crate::routes::schema::user::UserResponse;
|
||||||
use crate::utility;
|
use actix_macros::ErrResponse;
|
||||||
use actix_macros::{IntoResponseError, StatusCode};
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::ActiveUser;
|
||||||
|
use database::sea_orm::Set;
|
||||||
|
use derive_more::Display;
|
||||||
use objectid::ObjectId;
|
use objectid::ObjectId;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
@@ -141,7 +145,7 @@ mod schema {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub mod vk {
|
pub mod vk {
|
||||||
use crate::database::models::UserRole;
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, utoipa::ToSchema)]
|
#[derive(Serialize, Deserialize, utoipa::ToSchema)]
|
||||||
@@ -170,38 +174,42 @@ mod schema {
|
|||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
|
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
|
||||||
|
|
||||||
#[derive(Clone, Serialize, utoipa::ToSchema, IntoResponseError, StatusCode)]
|
#[derive(Clone, Serialize, Display, utoipa::ToSchema, ErrResponse)]
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[schema(as = SignUp::ErrorCode)]
|
#[schema(as = SignUp::ErrorCode)]
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
|
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// Conveyed the role of Admin.
|
/// Conveyed the role of Admin.
|
||||||
|
#[display("Conveyed the role of Admin.")]
|
||||||
DisallowedRole,
|
DisallowedRole,
|
||||||
|
|
||||||
/// Unknown name of the group.
|
/// Unknown name of the group.
|
||||||
|
#[display("Unknown name of the group.")]
|
||||||
InvalidGroupName,
|
InvalidGroupName,
|
||||||
|
|
||||||
/// User with this name is already registered.
|
/// User with this name is already registered.
|
||||||
|
#[display("User with this name is already registered.")]
|
||||||
UsernameAlreadyExists,
|
UsernameAlreadyExists,
|
||||||
|
|
||||||
/// Invalid VK ID token.
|
/// Invalid VK ID token.
|
||||||
|
#[display("Invalid VK ID token.")]
|
||||||
InvalidVkAccessToken,
|
InvalidVkAccessToken,
|
||||||
|
|
||||||
/// User with such an account VK is already registered.
|
/// User with such an account VK is already registered.
|
||||||
|
#[display("User with such an account VK is already registered.")]
|
||||||
VkAlreadyExists,
|
VkAlreadyExists,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Internal
|
|
||||||
|
|
||||||
/// Data for registration.
|
/// Data for registration.
|
||||||
pub struct SignUpData {
|
pub struct SignUpData {
|
||||||
|
// TODO: сделать ограничение на минимальную и максимальную длину при регистрации и смене.
|
||||||
/// User name.
|
/// User name.
|
||||||
pub username: String,
|
pub username: String,
|
||||||
|
|
||||||
/// Password.
|
/// Password.
|
||||||
///
|
///
|
||||||
/// Should be present even if registration occurs using the VK ID token.
|
/// Should be present even if registration occurs using the VK ID token.
|
||||||
pub password: String,
|
pub password: Option<String>,
|
||||||
|
|
||||||
/// Account identifier VK.
|
/// Account identifier VK.
|
||||||
pub vk_id: Option<i32>,
|
pub vk_id: Option<i32>,
|
||||||
@@ -216,20 +224,21 @@ mod schema {
|
|||||||
pub version: String,
|
pub version: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Into<User> for SignUpData {
|
impl From<SignUpData> for ActiveUser {
|
||||||
fn into(self) -> User {
|
fn from(value: SignUpData) -> Self {
|
||||||
let id = ObjectId::new().unwrap().to_string();
|
assert_ne!(value.password.is_some(), value.vk_id.is_some());
|
||||||
let access_token = utility::jwt::encode(&id);
|
|
||||||
|
|
||||||
User {
|
ActiveUser {
|
||||||
id,
|
id: Set(ObjectId::new().unwrap().to_string()),
|
||||||
username: self.username,
|
username: Set(value.username),
|
||||||
password: bcrypt::hash(self.password, bcrypt::DEFAULT_COST).unwrap(),
|
password: Set(value
|
||||||
vk_id: self.vk_id,
|
.password
|
||||||
access_token,
|
.map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap())),
|
||||||
group: self.group,
|
vk_id: Set(value.vk_id),
|
||||||
role: self.role,
|
telegram_id: Set(None),
|
||||||
version: self.version,
|
group: Set(Some(value.group)),
|
||||||
|
role: Set(value.role),
|
||||||
|
android_version: Set(Some(value.version)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -237,8 +246,6 @@ mod schema {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::database::driver;
|
|
||||||
use crate::database::models::UserRole;
|
|
||||||
use crate::routes::auth::sign_up::schema::Request;
|
use crate::routes::auth::sign_up::schema::Request;
|
||||||
use crate::routes::auth::sign_up::sign_up;
|
use crate::routes::auth::sign_up::sign_up;
|
||||||
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
|
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
|
||||||
@@ -247,22 +254,26 @@ mod tests {
|
|||||||
use actix_web::http::Method;
|
use actix_web::http::Method;
|
||||||
use actix_web::http::StatusCode;
|
use actix_web::http::StatusCode;
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::{UserColumn, UserEntity};
|
||||||
|
use database::sea_orm::ColumnTrait;
|
||||||
|
use database::sea_orm::{EntityTrait, QueryFilter};
|
||||||
|
|
||||||
struct SignUpPartial {
|
struct SignUpPartial<'a> {
|
||||||
username: String,
|
username: &'a str,
|
||||||
group: String,
|
group: &'a str,
|
||||||
role: UserRole,
|
role: UserRole,
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn sign_up_client(data: SignUpPartial) -> ServiceResponse {
|
async fn sign_up_client(data: SignUpPartial<'_>) -> ServiceResponse {
|
||||||
let app = test_app(test_app_state().await, sign_up).await;
|
let app = test_app(test_app_state().await, sign_up).await;
|
||||||
|
|
||||||
let req = test::TestRequest::with_uri("/sign-up")
|
let req = test::TestRequest::with_uri("/sign-up")
|
||||||
.method(Method::POST)
|
.method(Method::POST)
|
||||||
.set_json(Request {
|
.set_json(Request {
|
||||||
username: data.username.clone(),
|
username: data.username.to_string(),
|
||||||
password: "example".to_string(),
|
password: "example".to_string(),
|
||||||
group: data.group.clone(),
|
group: data.group.to_string(),
|
||||||
role: data.role.clone(),
|
role: data.role.clone(),
|
||||||
version: "1.0.0".to_string(),
|
version: "1.0.0".to_string(),
|
||||||
})
|
})
|
||||||
@@ -278,13 +289,18 @@ mod tests {
|
|||||||
test_env();
|
test_env();
|
||||||
|
|
||||||
let app_state = static_app_state().await;
|
let app_state = static_app_state().await;
|
||||||
driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string());
|
|
||||||
|
UserEntity::delete_many()
|
||||||
|
.filter(UserColumn::Username.eq("test::sign_up_valid"))
|
||||||
|
.exec(app_state.get_database())
|
||||||
|
.await
|
||||||
|
.expect("Failed to delete user");
|
||||||
|
|
||||||
// test
|
// test
|
||||||
|
|
||||||
let resp = sign_up_client(SignUpPartial {
|
let resp = sign_up_client(SignUpPartial {
|
||||||
username: "test::sign_up_valid".to_string(),
|
username: "test::sign_up_valid",
|
||||||
group: "ИС-214/23".to_string(),
|
group: "ИС-214/23",
|
||||||
role: UserRole::Student,
|
role: UserRole::Student,
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
@@ -299,11 +315,16 @@ mod tests {
|
|||||||
test_env();
|
test_env();
|
||||||
|
|
||||||
let app_state = static_app_state().await;
|
let app_state = static_app_state().await;
|
||||||
driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string());
|
|
||||||
|
UserEntity::delete_many()
|
||||||
|
.filter(UserColumn::Username.eq("test::sign_up_multiple"))
|
||||||
|
.exec(app_state.get_database())
|
||||||
|
.await
|
||||||
|
.expect("Failed to delete user");
|
||||||
|
|
||||||
let create = sign_up_client(SignUpPartial {
|
let create = sign_up_client(SignUpPartial {
|
||||||
username: "test::sign_up_multiple".to_string(),
|
username: "test::sign_up_multiple",
|
||||||
group: "ИС-214/23".to_string(),
|
group: "ИС-214/23",
|
||||||
role: UserRole::Student,
|
role: UserRole::Student,
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
@@ -311,8 +332,8 @@ mod tests {
|
|||||||
assert_eq!(create.status(), StatusCode::OK);
|
assert_eq!(create.status(), StatusCode::OK);
|
||||||
|
|
||||||
let resp = sign_up_client(SignUpPartial {
|
let resp = sign_up_client(SignUpPartial {
|
||||||
username: "test::sign_up_multiple".to_string(),
|
username: "test::sign_up_multiple",
|
||||||
group: "ИС-214/23".to_string(),
|
group: "ИС-214/23",
|
||||||
role: UserRole::Student,
|
role: UserRole::Student,
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
@@ -326,8 +347,8 @@ mod tests {
|
|||||||
|
|
||||||
// test
|
// test
|
||||||
let resp = sign_up_client(SignUpPartial {
|
let resp = sign_up_client(SignUpPartial {
|
||||||
username: "test::sign_up_invalid_role".to_string(),
|
username: "test::sign_up_invalid_role",
|
||||||
group: "ИС-214/23".to_string(),
|
group: "ИС-214/23",
|
||||||
role: UserRole::Admin,
|
role: UserRole::Admin,
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
@@ -341,8 +362,8 @@ mod tests {
|
|||||||
|
|
||||||
// test
|
// test
|
||||||
let resp = sign_up_client(SignUpPartial {
|
let resp = sign_up_client(SignUpPartial {
|
||||||
username: "test::sign_up_invalid_group".to_string(),
|
username: "test::sign_up_invalid_group",
|
||||||
group: "invalid_group".to_string(),
|
group: "invalid_group",
|
||||||
role: UserRole::Student,
|
role: UserRole::Student,
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
mod update_callback;
|
|
||||||
mod set_token;
|
|
||||||
|
|
||||||
pub use update_callback::*;
|
|
||||||
pub use set_token::*;
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
use crate::app_state::AppState;
|
|
||||||
use crate::database;
|
|
||||||
use crate::database::models::FCM;
|
|
||||||
use crate::extractors::authorized_user::UserExtractor;
|
|
||||||
use crate::extractors::base::SyncExtractor;
|
|
||||||
use crate::utility::mutex::{MutexScope, MutexScopeAsync};
|
|
||||||
use actix_web::{HttpResponse, Responder, patch, web};
|
|
||||||
use diesel::{RunQueryDsl, SaveChangesDsl};
|
|
||||||
use firebase_messaging_rs::FCMClient;
|
|
||||||
use firebase_messaging_rs::topic::{TopicManagementError, TopicManagementSupport};
|
|
||||||
use serde::Deserialize;
|
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
|
||||||
struct Params {
|
|
||||||
pub token: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_fcm(
|
|
||||||
app_state: &web::Data<AppState>,
|
|
||||||
user_data: &UserExtractor<true>,
|
|
||||||
token: String,
|
|
||||||
) -> Result<FCM, diesel::result::Error> {
|
|
||||||
match user_data.fcm() {
|
|
||||||
Some(fcm) => {
|
|
||||||
let mut fcm = fcm.clone();
|
|
||||||
fcm.token = token;
|
|
||||||
|
|
||||||
Ok(fcm)
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
let fcm = FCM {
|
|
||||||
user_id: user_data.user().id.clone(),
|
|
||||||
token,
|
|
||||||
topics: vec![],
|
|
||||||
};
|
|
||||||
|
|
||||||
match app_state.database.scope(|conn| {
|
|
||||||
diesel::insert_into(database::schema::fcm::table)
|
|
||||||
.values(&fcm)
|
|
||||||
.execute(conn)
|
|
||||||
}) {
|
|
||||||
Ok(_) => Ok(fcm),
|
|
||||||
Err(e) => Err(e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[utoipa::path(responses((status = OK)))]
|
|
||||||
#[patch("/set-token")]
|
|
||||||
pub async fn set_token(
|
|
||||||
app_state: web::Data<AppState>,
|
|
||||||
web::Query(params): web::Query<Params>,
|
|
||||||
user_data: SyncExtractor<UserExtractor<true>>,
|
|
||||||
) -> impl Responder {
|
|
||||||
let user_data = user_data.into_inner();
|
|
||||||
|
|
||||||
// If token not changes - exit.
|
|
||||||
if let Some(fcm) = user_data.fcm() {
|
|
||||||
if fcm.token == params.token {
|
|
||||||
return HttpResponse::Ok();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let fcm = get_fcm(&app_state, &user_data, params.token.clone()).await;
|
|
||||||
if let Err(e) = fcm {
|
|
||||||
eprintln!("Failed to get FCM: {e}");
|
|
||||||
return HttpResponse::Ok();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut fcm = fcm.ok().unwrap();
|
|
||||||
|
|
||||||
// Add default topics.
|
|
||||||
if !fcm.topics.contains(&Some("common".to_string())) {
|
|
||||||
fcm.topics.push(Some("common".to_string()));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Subscribe to default topics.
|
|
||||||
if let Some(e) = app_state
|
|
||||||
.fcm_client
|
|
||||||
.as_ref()
|
|
||||||
.unwrap()
|
|
||||||
.async_scope(
|
|
||||||
async |client: &mut FCMClient| -> Result<(), TopicManagementError> {
|
|
||||||
let mut tokens: Vec<String> = Vec::new();
|
|
||||||
tokens.push(fcm.token.clone());
|
|
||||||
|
|
||||||
for topic in fcm.topics.clone() {
|
|
||||||
if let Some(topic) = topic {
|
|
||||||
client.register_tokens_to_topic(topic.clone(), tokens.clone()).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.err()
|
|
||||||
{
|
|
||||||
eprintln!("Failed to subscribe token to topic: {:?}", e);
|
|
||||||
return HttpResponse::Ok();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write updates to db.
|
|
||||||
if let Some(e) = app_state
|
|
||||||
.database
|
|
||||||
.scope(|conn| fcm.save_changes::<FCM>(conn))
|
|
||||||
.err()
|
|
||||||
{
|
|
||||||
eprintln!("Failed to update FCM object: {e}");
|
|
||||||
}
|
|
||||||
|
|
||||||
HttpResponse::Ok()
|
|
||||||
}
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
use crate::app_state::AppState;
|
|
||||||
use crate::database::models::User;
|
|
||||||
use crate::extractors::base::SyncExtractor;
|
|
||||||
use crate::utility::mutex::MutexScope;
|
|
||||||
use actix_web::{HttpResponse, Responder, post, web};
|
|
||||||
use diesel::SaveChangesDsl;
|
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
|
||||||
(status = OK),
|
|
||||||
(status = INTERNAL_SERVER_ERROR)
|
|
||||||
))]
|
|
||||||
#[post("/update-callback/{version}")]
|
|
||||||
async fn update_callback(
|
|
||||||
app_state: web::Data<AppState>,
|
|
||||||
version: web::Path<String>,
|
|
||||||
user: SyncExtractor<User>,
|
|
||||||
) -> impl Responder {
|
|
||||||
let mut user = user.into_inner();
|
|
||||||
|
|
||||||
user.version = version.into_inner();
|
|
||||||
|
|
||||||
match app_state
|
|
||||||
.database
|
|
||||||
.scope(|conn| user.save_changes::<User>(conn))
|
|
||||||
{
|
|
||||||
Ok(_) => HttpResponse::Ok(),
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("Failed to update user: {}", e);
|
|
||||||
HttpResponse::InternalServerError()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
5
src/routes/flow/mod.rs
Normal file
5
src/routes/flow/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
mod telegram_auth;
|
||||||
|
mod telegram_complete;
|
||||||
|
|
||||||
|
pub use telegram_auth::*;
|
||||||
|
pub use telegram_complete::*;
|
||||||
173
src/routes/flow/telegram_auth.rs
Normal file
173
src/routes/flow/telegram_auth.rs
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
use self::schema::*;
|
||||||
|
use crate::routes::schema::ResponseError;
|
||||||
|
use crate::utility::telegram::{WebAppInitDataMap, WebAppUser};
|
||||||
|
use crate::{utility, AppState};
|
||||||
|
use actix_web::{post, web};
|
||||||
|
use chrono::{DateTime, Duration, Utc};
|
||||||
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::{ActiveUser, UserType};
|
||||||
|
use database::query::Query;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, Set};
|
||||||
|
use objectid::ObjectId;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use web::Json;
|
||||||
|
|
||||||
|
#[utoipa::path(responses(
|
||||||
|
(status = OK, body = Response),
|
||||||
|
(status = UNAUTHORIZED, body = ResponseError<ErrorCode>),
|
||||||
|
))]
|
||||||
|
#[post("/telegram-auth")]
|
||||||
|
pub async fn telegram_auth(
|
||||||
|
data_json: Json<Request>,
|
||||||
|
app_state: web::Data<AppState>,
|
||||||
|
) -> ServiceResponse {
|
||||||
|
let init_data = WebAppInitDataMap::from_str(data_json.into_inner().init_data);
|
||||||
|
|
||||||
|
{
|
||||||
|
let env = &app_state.get_env().telegram;
|
||||||
|
|
||||||
|
if let Err(error) = init_data.verify(env.bot_id, env.test_dc) {
|
||||||
|
return Err(ErrorCode::InvalidInitData(Arc::new(error))).into();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let auth_date = DateTime::<Utc>::from_timestamp(
|
||||||
|
init_data
|
||||||
|
.data_map
|
||||||
|
.get("auth_date")
|
||||||
|
.unwrap()
|
||||||
|
.parse()
|
||||||
|
.unwrap(),
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if Utc::now() - auth_date > Duration::minutes(5) {
|
||||||
|
return Err(ErrorCode::ExpiredInitData).into();
|
||||||
|
}
|
||||||
|
|
||||||
|
let web_app_user =
|
||||||
|
serde_json::from_str::<WebAppUser>(init_data.data_map.get("user").unwrap()).unwrap();
|
||||||
|
|
||||||
|
let user = match Query::find_user_by_telegram_id(app_state.get_database(), web_app_user.id)
|
||||||
|
.await
|
||||||
|
.expect("Failed to find user by telegram id")
|
||||||
|
{
|
||||||
|
Some(value) => value,
|
||||||
|
None => {
|
||||||
|
let new_user = ActiveUser {
|
||||||
|
id: Set(ObjectId::new().unwrap().to_string()),
|
||||||
|
username: Set(format!("telegram_{}", web_app_user.id)), // можно оставить, а можно поменять
|
||||||
|
password: Set(None), // ибо нехуй
|
||||||
|
vk_id: Set(None),
|
||||||
|
telegram_id: Set(Some(web_app_user.id)),
|
||||||
|
group: Set(None),
|
||||||
|
role: Set(UserRole::Student), // TODO: при реге проверять данные
|
||||||
|
android_version: Set(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
new_user
|
||||||
|
.insert(app_state.get_database())
|
||||||
|
.await
|
||||||
|
.expect("Failed to insert user")
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let access_token = utility::jwt::encode(UserType::Default, &user.id);
|
||||||
|
Ok(Response::new(&access_token, user.group.is_some())).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
mod schema {
|
||||||
|
use crate::routes::schema::PartialOkResponse;
|
||||||
|
use crate::state::AppState;
|
||||||
|
use crate::utility::telegram::VerifyError;
|
||||||
|
use actix_macros::ErrResponse;
|
||||||
|
use actix_web::body::EitherBody;
|
||||||
|
use actix_web::cookie::time::OffsetDateTime;
|
||||||
|
use actix_web::cookie::CookieBuilder;
|
||||||
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
|
use derive_more::Display;
|
||||||
|
use serde::{Deserialize, Serialize, Serializer};
|
||||||
|
use std::ops::Add;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
#[schema(as = Flow::TelegramAuth::Request)]
|
||||||
|
pub struct Request {
|
||||||
|
/// Telegram WebApp init data.
|
||||||
|
pub init_data: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
#[schema(as = Flow::TelegramAuth::Response)]
|
||||||
|
pub struct Response {
|
||||||
|
// #[serde(skip)] // TODO: я пока не придумал как не отдавать сырой токен в ответе
|
||||||
|
// #[schema(ignore)]
|
||||||
|
access_token: String,
|
||||||
|
|
||||||
|
pub completed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Response {
|
||||||
|
pub fn new(access_token: &str, completed: bool) -> Self {
|
||||||
|
Self {
|
||||||
|
access_token: access_token.to_string(),
|
||||||
|
completed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOkResponse for Response {
|
||||||
|
fn post_process(
|
||||||
|
&mut self,
|
||||||
|
request: &HttpRequest,
|
||||||
|
response: &mut HttpResponse<EitherBody<String>>,
|
||||||
|
) {
|
||||||
|
let access_token = &self.access_token;
|
||||||
|
|
||||||
|
let app_state = request.app_data::<web::Data<AppState>>().unwrap();
|
||||||
|
let mini_app_host = &*app_state.get_env().telegram.mini_app_host;
|
||||||
|
|
||||||
|
let cookie = CookieBuilder::new("access_token", access_token)
|
||||||
|
.domain(mini_app_host)
|
||||||
|
.path("/")
|
||||||
|
.expires(
|
||||||
|
OffsetDateTime::now_utc().add(std::time::Duration::from_secs(60 * 60 * 24 * 7)),
|
||||||
|
)
|
||||||
|
.http_only(true)
|
||||||
|
.secure(true)
|
||||||
|
.finish();
|
||||||
|
|
||||||
|
response.add_cookie(&cookie).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
|
||||||
|
|
||||||
|
#[derive(Clone, ToSchema, Display, ErrResponse)]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
|
||||||
|
#[schema(as = Flow::TelegramAuth::ErrorCode)]
|
||||||
|
pub enum ErrorCode {
|
||||||
|
#[display("Invalid init data provided: {_0}")]
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
InvalidInitData(Arc<VerifyError>),
|
||||||
|
|
||||||
|
#[display("Expired init data provided.")]
|
||||||
|
ExpiredInitData,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for ErrorCode {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
match self {
|
||||||
|
ErrorCode::InvalidInitData(_) => serializer.serialize_str("INVALID_INIT_DATA"),
|
||||||
|
ErrorCode::ExpiredInitData => serializer.serialize_str("EXPIRED_INIT_DATA"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
101
src/routes/flow/telegram_complete.rs
Normal file
101
src/routes/flow/telegram_complete.rs
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
use self::schema::*;
|
||||||
|
use crate::extractors::base::AsyncExtractor;
|
||||||
|
use crate::routes::schema::ResponseError;
|
||||||
|
use crate::AppState;
|
||||||
|
use actix_web::{post, web};
|
||||||
|
use database::entity::User;
|
||||||
|
use database::query::Query;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
|
||||||
|
use web::Json;
|
||||||
|
|
||||||
|
#[utoipa::path(responses(
|
||||||
|
(status = OK),
|
||||||
|
(status = CONFLICT, body = ResponseError<ErrorCode>),
|
||||||
|
(status = INTERNAL_SERVER_ERROR, body = ResponseError<ErrorCode>),
|
||||||
|
(status = BAD_REQUEST, body = ResponseError<ErrorCode>)
|
||||||
|
))]
|
||||||
|
#[post("/telegram-complete")]
|
||||||
|
pub async fn telegram_complete(
|
||||||
|
data: Json<Request>,
|
||||||
|
app_state: web::Data<AppState>,
|
||||||
|
user: AsyncExtractor<User>,
|
||||||
|
) -> ServiceResponse {
|
||||||
|
let user = user.into_inner();
|
||||||
|
|
||||||
|
// проверка на перезапись уже имеющихся данных
|
||||||
|
if user.group.is_some() {
|
||||||
|
return Err(ErrorCode::AlreadyCompleted).into();
|
||||||
|
}
|
||||||
|
|
||||||
|
let data = data.into_inner();
|
||||||
|
|
||||||
|
let db = app_state.get_database();
|
||||||
|
let mut active_user = user.clone().into_active_model();
|
||||||
|
|
||||||
|
// замена существующего имени, если оно отличается
|
||||||
|
if user.username != data.username {
|
||||||
|
if Query::is_user_exists_by_username(db, &data.username)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
{
|
||||||
|
return Err(ErrorCode::UsernameAlreadyExists).into();
|
||||||
|
}
|
||||||
|
|
||||||
|
active_user.username = Set(data.username);
|
||||||
|
}
|
||||||
|
|
||||||
|
// проверка на существование группы
|
||||||
|
if !app_state
|
||||||
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.data
|
||||||
|
.groups
|
||||||
|
.contains_key(&data.group)
|
||||||
|
{
|
||||||
|
return Err(ErrorCode::InvalidGroupName).into();
|
||||||
|
}
|
||||||
|
|
||||||
|
active_user.group = Set(Some(data.group));
|
||||||
|
|
||||||
|
active_user.update(db).await.expect("Failed to update user");
|
||||||
|
|
||||||
|
Ok(()).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
mod schema {
|
||||||
|
use actix_macros::ErrResponse;
|
||||||
|
use derive_more::Display;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize, Serialize, ToSchema)]
|
||||||
|
#[schema(as = Flow::TelegramFill::Request)]
|
||||||
|
pub struct Request {
|
||||||
|
/// Username.
|
||||||
|
pub username: String,
|
||||||
|
|
||||||
|
/// Group.
|
||||||
|
pub group: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
#[schema(as = Flow::TelegramFill::ErrorCode)]
|
||||||
|
pub enum ErrorCode {
|
||||||
|
#[display("This flow is already completed.")]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
|
||||||
|
AlreadyCompleted,
|
||||||
|
|
||||||
|
#[display("User with that name already exists.")]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
|
||||||
|
UsernameAlreadyExists,
|
||||||
|
|
||||||
|
#[display("The required group does not exist.")]
|
||||||
|
#[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
|
||||||
|
InvalidGroupName,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
|
pub mod admin;
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
pub mod fcm;
|
pub mod flow;
|
||||||
pub mod schedule;
|
pub mod schedule;
|
||||||
mod schema;
|
mod schema;
|
||||||
pub mod users;
|
pub mod users;
|
||||||
|
|||||||
@@ -1,23 +1,17 @@
|
|||||||
use crate::AppState;
|
|
||||||
use crate::routes::schedule::schema::CacheStatus;
|
use crate::routes::schedule::schema::CacheStatus;
|
||||||
|
use crate::AppState;
|
||||||
use actix_web::{get, web};
|
use actix_web::{get, web};
|
||||||
|
use std::ops::Deref;
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses(
|
||||||
(status = OK, body = CacheStatus),
|
(status = OK, body = CacheStatus),
|
||||||
))]
|
))]
|
||||||
#[get("/cache-status")]
|
#[get("/cache-status")]
|
||||||
pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
|
pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
|
||||||
// Prevent thread lock
|
app_state
|
||||||
let has_schedule = app_state
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
.schedule
|
.await
|
||||||
.lock()
|
.unwrap()
|
||||||
.as_ref()
|
.deref()
|
||||||
.map(|res| res.is_some())
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
match has_schedule {
|
|
||||||
true => CacheStatus::from(&app_state),
|
|
||||||
false => CacheStatus::default(),
|
|
||||||
}
|
|
||||||
.into()
|
.into()
|
||||||
}
|
}
|
||||||
|
|||||||
9
src/routes/schedule/get.rs
Normal file
9
src/routes/schedule/get.rs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
use crate::routes::schedule::schema::ScheduleView;
|
||||||
|
use crate::state::AppState;
|
||||||
|
use actix_web::{get, web};
|
||||||
|
|
||||||
|
#[utoipa::path(responses((status = OK, body = ScheduleView)))]
|
||||||
|
#[get("/")]
|
||||||
|
pub async fn schedule(app_state: web::Data<AppState>) -> ScheduleView {
|
||||||
|
ScheduleView::from(&app_state).await
|
||||||
|
}
|
||||||
@@ -1,12 +1,13 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use crate::database::models::User;
|
use crate::extractors::base::AsyncExtractor;
|
||||||
use crate::extractors::base::SyncExtractor;
|
use crate::routes::schedule::schema::ScheduleEntryResponse;
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
use crate::routes::schema::ResponseError;
|
||||||
use actix_web::{get, web};
|
use actix_web::{get, web};
|
||||||
|
use database::entity::User;
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses(
|
||||||
(status = OK, body = Response),
|
(status = OK, body = ScheduleEntryResponse),
|
||||||
(
|
(
|
||||||
status = SERVICE_UNAVAILABLE,
|
status = SERVICE_UNAVAILABLE,
|
||||||
body = ResponseError<ErrorCode>,
|
body = ResponseError<ErrorCode>,
|
||||||
@@ -25,68 +26,43 @@ use actix_web::{get, web};
|
|||||||
),
|
),
|
||||||
))]
|
))]
|
||||||
#[get("/group")]
|
#[get("/group")]
|
||||||
pub async fn group(user: SyncExtractor<User>, app_state: web::Data<AppState>) -> ServiceResponse {
|
pub async fn group(user: AsyncExtractor<User>, app_state: web::Data<AppState>) -> ServiceResponse {
|
||||||
// Prevent thread lock
|
match &user.into_inner().group {
|
||||||
let schedule_lock = app_state.schedule.lock().unwrap();
|
None => Err(ErrorCode::SignUpNotCompleted),
|
||||||
|
|
||||||
match schedule_lock.as_ref() {
|
Some(group) => match app_state
|
||||||
None => ErrorCode::NoSchedule.into_response(),
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
Some(schedule) => match schedule.data.groups.get(&user.into_inner().group) {
|
.await
|
||||||
None => ErrorCode::NotFound.into_response(),
|
.unwrap()
|
||||||
Some(entry) => Ok(entry.clone().into()).into(),
|
.data
|
||||||
|
.groups
|
||||||
|
.get(group)
|
||||||
|
{
|
||||||
|
None => Err(ErrorCode::NotFound),
|
||||||
|
|
||||||
|
Some(entry) => Ok(entry.clone().into()),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::parser::schema::ScheduleEntry;
|
use crate::routes::schedule::schema::ScheduleEntryResponse;
|
||||||
use actix_macros::{IntoResponseErrorNamed, StatusCode};
|
use actix_macros::ErrResponse;
|
||||||
use chrono::{DateTime, NaiveDateTime, Utc};
|
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
|
pub type ServiceResponse = crate::routes::schema::Response<ScheduleEntryResponse, ErrorCode>;
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
#[schema(as = GetGroup::Response)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
pub struct Response {
|
|
||||||
/// Group schedule.
|
|
||||||
pub group: ScheduleEntry,
|
|
||||||
|
|
||||||
/// ## Outdated variable.
|
|
||||||
///
|
|
||||||
/// By default, an empty list is returned.
|
|
||||||
#[deprecated = "Will be removed in future versions"]
|
|
||||||
pub updated: Vec<i32>,
|
|
||||||
|
|
||||||
/// ## Outdated variable.
|
|
||||||
///
|
|
||||||
/// By default, the initial date for unix.
|
|
||||||
#[deprecated = "Will be removed in future versions"]
|
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl From<ScheduleEntry> for Response {
|
|
||||||
fn from(group: ScheduleEntry) -> Self {
|
|
||||||
Self {
|
|
||||||
group,
|
|
||||||
updated: Vec::new(),
|
|
||||||
updated_at: NaiveDateTime::default().and_utc(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[schema(as = GroupSchedule::ErrorCode)]
|
#[schema(as = GroupSchedule::ErrorCode)]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// Schedules have not yet been parsed.
|
/// The user tried to access the API without completing singing up.
|
||||||
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
|
#[status_code = "actix_web::http::StatusCode::FORBIDDEN"]
|
||||||
#[display("Schedule not parsed yet.")]
|
#[display("You have not completed signing up.")]
|
||||||
NoSchedule,
|
SignUpNotCompleted,
|
||||||
|
|
||||||
/// Group not found.
|
/// Group not found.
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
||||||
|
|||||||
65
src/routes/schedule/group_by_name.rs
Normal file
65
src/routes/schedule/group_by_name.rs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
use self::schema::*;
|
||||||
|
use crate::routes::schedule::schema::ScheduleEntryResponse;
|
||||||
|
use crate::routes::schema::ResponseError;
|
||||||
|
use crate::AppState;
|
||||||
|
use actix_web::{get, web};
|
||||||
|
|
||||||
|
#[utoipa::path(responses(
|
||||||
|
(status = OK, body = ScheduleEntryResponse),
|
||||||
|
(
|
||||||
|
status = SERVICE_UNAVAILABLE,
|
||||||
|
body = ResponseError<ErrorCode>,
|
||||||
|
example = json!({
|
||||||
|
"code": "NO_SCHEDULE",
|
||||||
|
"message": "Schedule not parsed yet."
|
||||||
|
})
|
||||||
|
),
|
||||||
|
(
|
||||||
|
status = NOT_FOUND,
|
||||||
|
body = ResponseError<ErrorCode>,
|
||||||
|
example = json!({
|
||||||
|
"code": "NOT_FOUND",
|
||||||
|
"message": "Required group not found."
|
||||||
|
})
|
||||||
|
),
|
||||||
|
))]
|
||||||
|
#[get("/group/{group_name}")]
|
||||||
|
pub async fn group_by_name(
|
||||||
|
path: web::Path<String>,
|
||||||
|
app_state: web::Data<AppState>,
|
||||||
|
) -> ServiceResponse {
|
||||||
|
let group_name = path.into_inner();
|
||||||
|
|
||||||
|
match app_state
|
||||||
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.data
|
||||||
|
.groups
|
||||||
|
.get(&group_name)
|
||||||
|
{
|
||||||
|
None => Err(ErrorCode::NotFound),
|
||||||
|
Some(entry) => Ok(entry.clone().into()),
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
mod schema {
|
||||||
|
use crate::routes::schedule::schema::ScheduleEntryResponse;
|
||||||
|
use actix_macros::ErrResponse;
|
||||||
|
use derive_more::Display;
|
||||||
|
use serde::Serialize;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
pub type ServiceResponse = crate::routes::schema::Response<ScheduleEntryResponse, ErrorCode>;
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
#[schema(as = GroupByNameSchedule::ErrorCode)]
|
||||||
|
pub enum ErrorCode {
|
||||||
|
/// Group not found.
|
||||||
|
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
||||||
|
#[display("Required group not found.")]
|
||||||
|
NotFound,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,48 +1,35 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use crate::routes::schedule::schema::ErrorCode;
|
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
|
||||||
use actix_web::{get, web};
|
use actix_web::{get, web};
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses((status = OK, body = Response)))]
|
||||||
(status = OK, body = Response),
|
|
||||||
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
|
|
||||||
))]
|
|
||||||
#[get("/group-names")]
|
#[get("/group-names")]
|
||||||
pub async fn group_names(app_state: web::Data<AppState>) -> ServiceResponse {
|
pub async fn group_names(app_state: web::Data<AppState>) -> Response {
|
||||||
// Prevent thread lock
|
let mut names: Vec<String> = app_state
|
||||||
let schedule_lock = app_state.schedule.lock().unwrap();
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.data
|
||||||
|
.groups
|
||||||
|
.keys()
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
|
||||||
match schedule_lock.as_ref() {
|
|
||||||
None => ErrorCode::NoSchedule.into_response(),
|
|
||||||
Some(schedule) => {
|
|
||||||
let mut names: Vec<String> = schedule.data.groups.keys().cloned().collect();
|
|
||||||
names.sort();
|
names.sort();
|
||||||
|
|
||||||
Ok(names.into()).into()
|
Response { names }
|
||||||
}
|
|
||||||
}
|
|
||||||
.into()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::routes::schedule::schema::ErrorCode;
|
use actix_macros::ResponderJson;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
|
#[derive(Serialize, ToSchema, ResponderJson)]
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
|
||||||
#[schema(as = GetGroupNames::Response)]
|
#[schema(as = GetGroupNames::Response)]
|
||||||
pub struct Response {
|
pub struct Response {
|
||||||
/// List of group names sorted in alphabetical order.
|
/// List of group names sorted in alphabetical order.
|
||||||
#[schema(examples(json!(["ИС-214/23"])))]
|
#[schema(examples(json!(["ИС-214/23"])))]
|
||||||
pub names: Vec<String>,
|
pub names: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Vec<String>> for Response {
|
|
||||||
fn from(names: Vec<String>) -> Self {
|
|
||||||
Self { names }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
mod cache_status;
|
mod cache_status;
|
||||||
mod group;
|
mod group;
|
||||||
|
mod group_by_name;
|
||||||
mod group_names;
|
mod group_names;
|
||||||
mod schedule;
|
mod get;
|
||||||
|
mod schema;
|
||||||
mod teacher;
|
mod teacher;
|
||||||
mod teacher_names;
|
mod teacher_names;
|
||||||
mod schema;
|
|
||||||
mod update_download_url;
|
|
||||||
|
|
||||||
pub use cache_status::*;
|
pub use cache_status::*;
|
||||||
pub use group::*;
|
pub use group::*;
|
||||||
|
pub use group_by_name::*;
|
||||||
pub use group_names::*;
|
pub use group_names::*;
|
||||||
pub use schedule::*;
|
pub use get::*;
|
||||||
pub use teacher::*;
|
pub use teacher::*;
|
||||||
pub use teacher_names::*;
|
pub use teacher_names::*;
|
||||||
pub use update_download_url::*;
|
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
use self::schema::*;
|
|
||||||
use crate::app_state::AppState;
|
|
||||||
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
|
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
|
||||||
use actix_web::{get, web};
|
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
|
||||||
(status = OK, body = ScheduleView),
|
|
||||||
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>)
|
|
||||||
))]
|
|
||||||
#[get("/")]
|
|
||||||
pub async fn schedule(app_state: web::Data<AppState>) -> ServiceResponse {
|
|
||||||
match ScheduleView::try_from(&app_state) {
|
|
||||||
Ok(res) => Ok(res).into(),
|
|
||||||
Err(e) => match e {
|
|
||||||
ErrorCode::NoSchedule => ErrorCode::NoSchedule.into_response(),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod schema {
|
|
||||||
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
|
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<ScheduleView, ErrorCode>;
|
|
||||||
}
|
|
||||||
@@ -1,25 +1,18 @@
|
|||||||
use crate::app_state::{AppState, Schedule};
|
use crate::state::AppState;
|
||||||
use crate::parser::schema::ScheduleEntry;
|
use actix_macros::{OkResponse, ResponderJson};
|
||||||
use actix_macros::{IntoResponseErrorNamed, ResponderJson, StatusCode};
|
|
||||||
use actix_web::web;
|
use actix_web::web;
|
||||||
use chrono::{DateTime, Duration, Utc};
|
use providers::base::{ScheduleEntry, ScheduleSnapshot};
|
||||||
use derive_more::Display;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use std::ops::Deref;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
/// Response from schedule server.
|
/// Response from schedule server.
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema, OkResponse, ResponderJson)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct ScheduleView {
|
pub struct ScheduleView {
|
||||||
/// ETag schedules on polytechnic server.
|
/// Url to xls file.
|
||||||
etag: String,
|
url: String,
|
||||||
|
|
||||||
/// Schedule update date on polytechnic website.
|
|
||||||
uploaded_at: DateTime<Utc>,
|
|
||||||
|
|
||||||
/// Date last downloaded from the Polytechnic server.
|
|
||||||
downloaded_at: DateTime<Utc>,
|
|
||||||
|
|
||||||
/// Groups schedule.
|
/// Groups schedule.
|
||||||
groups: HashMap<String, ScheduleEntry>,
|
groups: HashMap<String, ScheduleEntry>,
|
||||||
@@ -28,80 +21,54 @@ pub struct ScheduleView {
|
|||||||
teachers: HashMap<String, ScheduleEntry>,
|
teachers: HashMap<String, ScheduleEntry>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
#[derive(Serialize, ToSchema, OkResponse)]
|
||||||
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
|
pub struct ScheduleEntryResponse(ScheduleEntry);
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
|
||||||
#[schema(as = ScheduleShared::ErrorCode)]
|
impl From<ScheduleEntry> for ScheduleEntryResponse {
|
||||||
pub enum ErrorCode {
|
fn from(value: ScheduleEntry) -> Self {
|
||||||
/// Schedules not yet parsed.
|
Self(value)
|
||||||
#[display("Schedule not parsed yet.")]
|
}
|
||||||
NoSchedule,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TryFrom<&web::Data<AppState>> for ScheduleView {
|
impl ScheduleView {
|
||||||
type Error = ErrorCode;
|
pub async fn from(app_state: &web::Data<AppState>) -> Self {
|
||||||
|
let schedule = app_state
|
||||||
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.deref()
|
||||||
|
.clone();
|
||||||
|
|
||||||
fn try_from(app_state: &web::Data<AppState>) -> Result<Self, Self::Error> {
|
Self {
|
||||||
if let Some(schedule) = app_state.schedule.lock().unwrap().clone() {
|
url: schedule.url,
|
||||||
Ok(Self {
|
|
||||||
etag: schedule.etag,
|
|
||||||
uploaded_at: schedule.updated_at,
|
|
||||||
downloaded_at: schedule.parsed_at,
|
|
||||||
groups: schedule.data.groups,
|
groups: schedule.data.groups,
|
||||||
teachers: schedule.data.teachers,
|
teachers: schedule.data.teachers,
|
||||||
})
|
|
||||||
} else {
|
|
||||||
Err(ErrorCode::NoSchedule)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Cached schedule status.
|
/// Cached schedule status.
|
||||||
#[derive(Serialize, Deserialize, ToSchema, ResponderJson)]
|
#[derive(Serialize, Deserialize, ToSchema, ResponderJson, OkResponse)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct CacheStatus {
|
pub struct CacheStatus {
|
||||||
/// Schedule hash.
|
/// Schedule hash.
|
||||||
pub cache_hash: String,
|
pub hash: String,
|
||||||
|
|
||||||
/// Whether the schedule reference needs to be updated.
|
|
||||||
pub cache_update_required: bool,
|
|
||||||
|
|
||||||
/// Last cache update date.
|
/// Last cache update date.
|
||||||
pub last_cache_update: i64,
|
pub fetched_at: i64,
|
||||||
|
|
||||||
/// Cached schedule update date.
|
/// Cached schedule update date.
|
||||||
///
|
///
|
||||||
/// Determined by the polytechnic's server.
|
/// Determined by the polytechnic's server.
|
||||||
pub last_schedule_update: i64,
|
pub updated_at: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CacheStatus {
|
impl From<&ScheduleSnapshot> for CacheStatus {
|
||||||
pub fn default() -> Self {
|
fn from(value: &ScheduleSnapshot) -> Self {
|
||||||
CacheStatus {
|
|
||||||
cache_hash: "0000000000000000000000000000000000000000".to_string(),
|
|
||||||
cache_update_required: true,
|
|
||||||
last_cache_update: 0,
|
|
||||||
last_schedule_update: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&web::Data<AppState>> for CacheStatus {
|
|
||||||
fn from(value: &web::Data<AppState>) -> Self {
|
|
||||||
let schedule_lock = value.schedule.lock().unwrap();
|
|
||||||
let schedule = schedule_lock.as_ref().unwrap();
|
|
||||||
|
|
||||||
CacheStatus::from(schedule)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Schedule> for CacheStatus {
|
|
||||||
fn from(value: &Schedule) -> Self {
|
|
||||||
Self {
|
Self {
|
||||||
cache_hash: value.hash(),
|
hash: value.hash(),
|
||||||
cache_update_required: (value.fetched_at - Utc::now()) > Duration::minutes(5),
|
fetched_at: value.fetched_at.timestamp(),
|
||||||
last_cache_update: value.fetched_at.timestamp(),
|
updated_at: value.updated_at.timestamp(),
|
||||||
last_schedule_update: value.updated_at.timestamp(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,18 +1,11 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
|
use crate::routes::schema::ResponseError;
|
||||||
use actix_web::{get, web};
|
use actix_web::{get, web};
|
||||||
|
use providers::base::ScheduleEntry;
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses(
|
||||||
(status = OK, body = Response),
|
(status = OK, body = ScheduleEntry),
|
||||||
(
|
|
||||||
status = SERVICE_UNAVAILABLE,
|
|
||||||
body = ResponseError<ErrorCode>,
|
|
||||||
example = json!({
|
|
||||||
"code": "NO_SCHEDULE",
|
|
||||||
"message": "Schedule not parsed yet."
|
|
||||||
})
|
|
||||||
),
|
|
||||||
(
|
(
|
||||||
status = NOT_FOUND,
|
status = NOT_FOUND,
|
||||||
body = ResponseError<ErrorCode>,
|
body = ResponseError<ErrorCode>,
|
||||||
@@ -23,72 +16,35 @@ use actix_web::{get, web};
|
|||||||
),
|
),
|
||||||
))]
|
))]
|
||||||
#[get("/teacher/{name}")]
|
#[get("/teacher/{name}")]
|
||||||
pub async fn teacher(
|
pub async fn teacher(name: web::Path<String>, app_state: web::Data<AppState>) -> ServiceResponse {
|
||||||
name: web::Path<String>,
|
match app_state
|
||||||
app_state: web::Data<AppState>,
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
) -> ServiceResponse {
|
.await
|
||||||
// Prevent thread lock
|
.unwrap()
|
||||||
let schedule_lock = app_state.schedule.lock().unwrap();
|
.data
|
||||||
|
.teachers
|
||||||
|
.get(&name.into_inner())
|
||||||
|
{
|
||||||
|
None => Err(ErrorCode::NotFound),
|
||||||
|
|
||||||
match schedule_lock.as_ref() {
|
Some(entry) => Ok(entry.clone().into()),
|
||||||
None => ErrorCode::NoSchedule.into_response(),
|
|
||||||
Some(schedule) => match schedule.data.teachers.get(&name.into_inner()) {
|
|
||||||
None => ErrorCode::NotFound.into_response(),
|
|
||||||
Some(entry) => Ok(entry.clone().into()).into(),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
.into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::parser::schema::ScheduleEntry;
|
use crate::routes::schedule::schema::ScheduleEntryResponse;
|
||||||
use actix_macros::{IntoResponseErrorNamed, StatusCode};
|
use actix_macros::ErrResponse;
|
||||||
use chrono::{DateTime, NaiveDateTime, Utc};
|
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
|
pub type ServiceResponse = crate::routes::schema::Response<ScheduleEntryResponse, ErrorCode>;
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
#[schema(as = GetTeacher::Response)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
pub struct Response {
|
|
||||||
/// Teacher's schedule.
|
|
||||||
pub teacher: ScheduleEntry,
|
|
||||||
|
|
||||||
/// ## Deprecated variable.
|
|
||||||
///
|
|
||||||
/// By default, an empty list is returned.
|
|
||||||
#[deprecated = "Will be removed in future versions"]
|
|
||||||
pub updated: Vec<i32>,
|
|
||||||
|
|
||||||
/// ## Deprecated variable.
|
|
||||||
///
|
|
||||||
/// Defaults to the Unix start date.
|
|
||||||
#[deprecated = "Will be removed in future versions"]
|
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(deprecated)]
|
|
||||||
impl From<ScheduleEntry> for Response {
|
|
||||||
fn from(teacher: ScheduleEntry) -> Self {
|
|
||||||
Self {
|
|
||||||
teacher,
|
|
||||||
updated: Vec::new(),
|
|
||||||
updated_at: NaiveDateTime::default().and_utc(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[schema(as = TeacherSchedule::ErrorCode)]
|
#[schema(as = TeacherSchedule::ErrorCode)]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// Schedules have not yet been parsed.
|
|
||||||
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
|
|
||||||
#[display("Schedule not parsed yet.")]
|
|
||||||
NoSchedule,
|
|
||||||
|
|
||||||
/// Teacher not found.
|
/// Teacher not found.
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
||||||
#[display("Required teacher not found.")]
|
#[display("Required teacher not found.")]
|
||||||
|
|||||||
@@ -1,48 +1,35 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use crate::routes::schedule::schema::ErrorCode;
|
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
|
||||||
use actix_web::{get, web};
|
use actix_web::{get, web};
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
#[utoipa::path(responses((status = OK, body = Response)))]
|
||||||
(status = OK, body = Response),
|
|
||||||
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
|
|
||||||
))]
|
|
||||||
#[get("/teacher-names")]
|
#[get("/teacher-names")]
|
||||||
pub async fn teacher_names(app_state: web::Data<AppState>) -> ServiceResponse {
|
pub async fn teacher_names(app_state: web::Data<AppState>) -> Response {
|
||||||
// Prevent thread lock
|
let mut names: Vec<String> = app_state
|
||||||
let schedule_lock = app_state.schedule.lock().unwrap();
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.data
|
||||||
|
.teachers
|
||||||
|
.keys()
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
|
||||||
match schedule_lock.as_ref() {
|
|
||||||
None => ErrorCode::NoSchedule.into_response(),
|
|
||||||
Some(schedule) => {
|
|
||||||
let mut names: Vec<String> = schedule.data.teachers.keys().cloned().collect();
|
|
||||||
names.sort();
|
names.sort();
|
||||||
|
|
||||||
Ok(names.into()).into()
|
Response { names }
|
||||||
}
|
|
||||||
}
|
|
||||||
.into()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use crate::routes::schedule::schema::ErrorCode;
|
use actix_macros::ResponderJson;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
|
#[derive(Serialize, ToSchema, ResponderJson)]
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
|
||||||
#[schema(as = GetTeacherNames::Response)]
|
#[schema(as = GetTeacherNames::Response)]
|
||||||
pub struct Response {
|
pub struct Response {
|
||||||
/// List of teacher names sorted alphabetically.
|
/// List of teacher names sorted alphabetically.
|
||||||
#[schema(examples(json!(["Хомченко Н.Е."])))]
|
#[schema(examples(json!(["Хомченко Н.Е."])))]
|
||||||
pub names: Vec<String>,
|
pub names: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Vec<String>> for Response {
|
|
||||||
fn from(names: Vec<String>) -> Self {
|
|
||||||
Self { names }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,137 +0,0 @@
|
|||||||
use self::schema::*;
|
|
||||||
use crate::AppState;
|
|
||||||
use crate::app_state::Schedule;
|
|
||||||
use crate::parser::parse_xls;
|
|
||||||
use crate::routes::schedule::schema::CacheStatus;
|
|
||||||
use crate::routes::schema::{IntoResponseAsError, ResponseError};
|
|
||||||
use crate::xls_downloader::interface::XLSDownloader;
|
|
||||||
use actix_web::web::Json;
|
|
||||||
use actix_web::{patch, web};
|
|
||||||
use chrono::Utc;
|
|
||||||
|
|
||||||
#[utoipa::path(responses(
|
|
||||||
(status = OK, body = CacheStatus),
|
|
||||||
(status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>),
|
|
||||||
))]
|
|
||||||
#[patch("/update-download-url")]
|
|
||||||
pub async fn update_download_url(
|
|
||||||
data: Json<Request>,
|
|
||||||
app_state: web::Data<AppState>,
|
|
||||||
) -> ServiceResponse {
|
|
||||||
if !data.url.starts_with("https://politehnikum-eng.ru/") {
|
|
||||||
return ErrorCode::NonWhitelistedHost.into_response();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut downloader = app_state.downloader.lock().unwrap();
|
|
||||||
|
|
||||||
if let Some(url) = &downloader.url {
|
|
||||||
if url.eq(&data.url) {
|
|
||||||
return Ok(CacheStatus::from(&app_state)).into();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match downloader.set_url(data.url.clone()).await {
|
|
||||||
Ok(fetch_result) => {
|
|
||||||
let mut schedule = app_state.schedule.lock().unwrap();
|
|
||||||
|
|
||||||
if schedule.is_some()
|
|
||||||
&& fetch_result.uploaded_at < schedule.as_ref().unwrap().updated_at
|
|
||||||
{
|
|
||||||
return ErrorCode::OutdatedSchedule.into_response();
|
|
||||||
}
|
|
||||||
|
|
||||||
match downloader.fetch(false).await {
|
|
||||||
Ok(download_result) => match parse_xls(&download_result.data.unwrap()) {
|
|
||||||
Ok(data) => {
|
|
||||||
*schedule = Some(Schedule {
|
|
||||||
etag: download_result.etag,
|
|
||||||
fetched_at: download_result.requested_at,
|
|
||||||
updated_at: download_result.uploaded_at,
|
|
||||||
parsed_at: Utc::now(),
|
|
||||||
data,
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(CacheStatus::from(schedule.as_ref().unwrap())).into()
|
|
||||||
}
|
|
||||||
Err(error) => {
|
|
||||||
sentry::capture_error(&error);
|
|
||||||
|
|
||||||
ErrorCode::InvalidSchedule(error).into_response()
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(error) => {
|
|
||||||
eprintln!("Unknown url provided {}", data.url);
|
|
||||||
eprintln!("{:?}", error);
|
|
||||||
|
|
||||||
ErrorCode::DownloadFailed.into_response()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(error) => {
|
|
||||||
eprintln!("Unknown url provided {}", data.url);
|
|
||||||
eprintln!("{:?}", error);
|
|
||||||
|
|
||||||
ErrorCode::FetchFailed.into_response()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod schema {
|
|
||||||
use crate::parser::schema::ParseError;
|
|
||||||
use crate::routes::schedule::schema::CacheStatus;
|
|
||||||
use actix_macros::{IntoResponseErrorNamed, StatusCode};
|
|
||||||
use derive_more::Display;
|
|
||||||
use serde::{Deserialize, Serialize, Serializer};
|
|
||||||
use utoipa::ToSchema;
|
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<CacheStatus, ErrorCode>;
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, ToSchema)]
|
|
||||||
pub struct Request {
|
|
||||||
/// Schedule link.
|
|
||||||
pub url: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
|
|
||||||
#[schema(as = SetDownloadUrl::ErrorCode)]
|
|
||||||
pub enum ErrorCode {
|
|
||||||
/// Transferred link with host different from politehnikum-eng.ru.
|
|
||||||
#[display("URL with unknown host provided. Provide url with 'politehnikum-eng.ru' host.")]
|
|
||||||
NonWhitelistedHost,
|
|
||||||
|
|
||||||
/// Failed to retrieve file metadata.
|
|
||||||
#[display("Unable to retrieve metadata from the specified URL.")]
|
|
||||||
FetchFailed,
|
|
||||||
|
|
||||||
/// Failed to download the file.
|
|
||||||
#[display("Unable to retrieve data from the specified URL.")]
|
|
||||||
DownloadFailed,
|
|
||||||
|
|
||||||
/// The link leads to an outdated schedule.
|
|
||||||
///
|
|
||||||
/// An outdated schedule refers to a schedule that was published earlier
|
|
||||||
/// than is currently available.
|
|
||||||
#[display("The schedule is older than it already is.")]
|
|
||||||
OutdatedSchedule,
|
|
||||||
|
|
||||||
/// Failed to parse the schedule.
|
|
||||||
#[display("{_0}")]
|
|
||||||
InvalidSchedule(ParseError),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for ErrorCode {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: Serializer,
|
|
||||||
{
|
|
||||||
match self {
|
|
||||||
ErrorCode::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
|
|
||||||
ErrorCode::FetchFailed => serializer.serialize_str("FETCH_FAILED"),
|
|
||||||
ErrorCode::DownloadFailed => serializer.serialize_str("DOWNLOAD_FAILED"),
|
|
||||||
ErrorCode::OutdatedSchedule => serializer.serialize_str("OUTDATED_SCHEDULE"),
|
|
||||||
ErrorCode::InvalidSchedule(_) => serializer.serialize_str("INVALID_SCHEDULE"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -4,33 +4,30 @@ use actix_web::http::StatusCode;
|
|||||||
use actix_web::{HttpRequest, HttpResponse, Responder};
|
use actix_web::{HttpRequest, HttpResponse, Responder};
|
||||||
use serde::{Serialize, Serializer};
|
use serde::{Serialize, Serializer};
|
||||||
use std::convert::Into;
|
use std::convert::Into;
|
||||||
|
use std::fmt::Display;
|
||||||
use utoipa::PartialSchema;
|
use utoipa::PartialSchema;
|
||||||
|
|
||||||
pub struct Response<T, E>(pub Result<T, E>)
|
pub struct Response<T, E>(pub Result<T, E>)
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + PartialOkResponse,
|
||||||
E: Serialize + PartialSchema + Clone + PartialStatusCode;
|
E: Serialize + PartialSchema + Display + PartialErrResponse;
|
||||||
|
|
||||||
pub trait PartialStatusCode {
|
|
||||||
fn status_code(&self) -> StatusCode;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Transform Response<T, E> into Result<T, E>
|
/// Transform Response<T, E> into Result<T, E>
|
||||||
impl<T, E> Into<Result<T, E>> for Response<T, E>
|
impl<T, E> From<Response<T, E>> for Result<T, E>
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + PartialOkResponse,
|
||||||
E: Serialize + PartialSchema + Clone + PartialStatusCode,
|
E: Serialize + PartialSchema + Display + PartialErrResponse,
|
||||||
{
|
{
|
||||||
fn into(self) -> Result<T, E> {
|
fn from(value: Response<T, E>) -> Self {
|
||||||
self.0
|
value.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Transform T into Response<T, E>
|
/// Transform T into Response<T, E>
|
||||||
impl<T, E> From<Result<T, E>> for Response<T, E>
|
impl<T, E> From<Result<T, E>> for Response<T, E>
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + PartialOkResponse,
|
||||||
E: Serialize + PartialSchema + Clone + PartialStatusCode,
|
E: Serialize + PartialSchema + Display + PartialErrResponse,
|
||||||
{
|
{
|
||||||
fn from(value: Result<T, E>) -> Self {
|
fn from(value: Result<T, E>) -> Self {
|
||||||
Response(value)
|
Response(value)
|
||||||
@@ -40,17 +37,16 @@ where
|
|||||||
/// Serialize Response<T, E>
|
/// Serialize Response<T, E>
|
||||||
impl<T, E> Serialize for Response<T, E>
|
impl<T, E> Serialize for Response<T, E>
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + PartialOkResponse,
|
||||||
E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
|
E: Serialize + PartialSchema + Display + PartialErrResponse + Clone + Into<ResponseError<E>>,
|
||||||
{
|
{
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
where
|
where
|
||||||
S: Serializer,
|
S: Serializer,
|
||||||
{
|
{
|
||||||
match &self.0 {
|
match &self.0 {
|
||||||
Ok(ok) => serializer.serialize_some::<T>(&ok),
|
Ok(ok) => serializer.serialize_some(&ok),
|
||||||
Err(err) => serializer
|
Err(err) => serializer.serialize_some(&err.clone().into()),
|
||||||
.serialize_some::<ResponseError<E>>(&ResponseError::<E>::from(err.clone().into())),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -58,12 +54,12 @@ where
|
|||||||
/// Transform Response<T, E> to HttpResponse<String>
|
/// Transform Response<T, E> to HttpResponse<String>
|
||||||
impl<T, E> Responder for Response<T, E>
|
impl<T, E> Responder for Response<T, E>
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + PartialOkResponse,
|
||||||
E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
|
E: Serialize + PartialSchema + Display + PartialErrResponse + Clone + Into<ResponseError<E>>,
|
||||||
{
|
{
|
||||||
type Body = EitherBody<String>;
|
type Body = EitherBody<String>;
|
||||||
|
|
||||||
fn respond_to(self, _: &HttpRequest) -> HttpResponse<Self::Body> {
|
fn respond_to(mut self, request: &HttpRequest) -> HttpResponse<Self::Body> {
|
||||||
match serde_json::to_string(&self) {
|
match serde_json::to_string(&self) {
|
||||||
Ok(body) => {
|
Ok(body) => {
|
||||||
let code = match &self.0 {
|
let code = match &self.0 {
|
||||||
@@ -71,13 +67,19 @@ where
|
|||||||
Err(e) => e.status_code(),
|
Err(e) => e.status_code(),
|
||||||
};
|
};
|
||||||
|
|
||||||
match HttpResponse::build(code)
|
let mut response = match HttpResponse::build(code)
|
||||||
.content_type(mime::APPLICATION_JSON)
|
.content_type(mime::APPLICATION_JSON)
|
||||||
.message_body(body)
|
.message_body(body)
|
||||||
{
|
{
|
||||||
Ok(res) => res.map_into_left_body(),
|
Ok(res) => res.map_into_left_body(),
|
||||||
Err(err) => HttpResponse::from_error(err).map_into_right_body(),
|
Err(err) => HttpResponse::from_error(err).map_into_right_body(),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Ok(ok) = &mut self.0 {
|
||||||
|
ok.post_process(request, &mut response);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
response
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
@@ -87,73 +89,109 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// ResponseError<T>
|
/// Трейт для всех положительных ответов от сервера
|
||||||
///
|
pub trait PartialOkResponse {
|
||||||
/// Field `message` is optional for backwards compatibility with Android App, that produces error if new fields will be added to JSON response.
|
fn post_process(
|
||||||
#[derive(Serialize, utoipa::ToSchema)]
|
&mut self,
|
||||||
pub struct ResponseError<T: Serialize + PartialSchema> {
|
_request: &HttpRequest,
|
||||||
pub code: T,
|
_response: &mut HttpResponse<EitherBody<String>>,
|
||||||
|
) {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
}
|
||||||
pub message: Option<String>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait IntoResponseAsError<T>
|
impl PartialOkResponse for () {}
|
||||||
|
|
||||||
|
/// Трейт для всех отрицательных ответов от сервера
|
||||||
|
pub trait PartialErrResponse {
|
||||||
|
fn status_code(&self) -> StatusCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ResponseError<T>
|
||||||
|
#[derive(Serialize, utoipa::ToSchema)]
|
||||||
|
pub struct ResponseError<T: Serialize + PartialSchema + Clone> {
|
||||||
|
pub code: T,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> From<T> for ResponseError<T>
|
||||||
where
|
where
|
||||||
T: Serialize + PartialSchema,
|
T: Serialize + PartialSchema + Display + Clone,
|
||||||
Self: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<Self>>,
|
|
||||||
{
|
{
|
||||||
fn into_response(self) -> Response<T, Self> {
|
fn from(code: T) -> Self {
|
||||||
Response(Err(self))
|
Self {
|
||||||
|
message: format!("{}", code),
|
||||||
|
code,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub mod user {
|
pub mod user {
|
||||||
use crate::database::models::{User, UserRole};
|
use actix_macros::{OkResponse, ResponderJson};
|
||||||
use actix_macros::ResponderJson;
|
use database::entity::sea_orm_active_enums::UserRole;
|
||||||
|
use database::entity::User;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
|
||||||
//noinspection SpellCheckingInspection
|
//noinspection SpellCheckingInspection
|
||||||
/// Используется для скрытия чувствительных полей, таких как хеш пароля или FCM
|
/// Используется для скрытия чувствительных полей, таких как хеш пароля
|
||||||
#[derive(Serialize, utoipa::ToSchema, ResponderJson)]
|
#[derive(Serialize, utoipa::ToSchema, ResponderJson, OkResponse)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct UserResponse {
|
pub struct UserResponse {
|
||||||
/// UUID
|
/// UUID
|
||||||
#[schema(examples("67dcc9a9507b0000772744a2"))]
|
#[schema(examples("67dcc9a9507b0000772744a2"))]
|
||||||
id: String,
|
pub id: String,
|
||||||
|
|
||||||
/// Имя пользователя
|
/// Имя пользователя
|
||||||
#[schema(examples("n08i40k"))]
|
#[schema(examples("n08i40k"))]
|
||||||
username: String,
|
pub username: String,
|
||||||
|
|
||||||
/// Группа
|
/// Группа
|
||||||
#[schema(examples("ИС-214/23"))]
|
#[schema(examples("ИС-214/23"))]
|
||||||
group: String,
|
pub group: Option<String>,
|
||||||
|
|
||||||
/// Роль
|
/// Роль
|
||||||
role: UserRole,
|
pub role: UserRole,
|
||||||
|
|
||||||
/// Идентификатор привязанного аккаунта VK
|
/// Идентификатор привязанного аккаунта VK
|
||||||
#[schema(examples(498094647, json!(null)))]
|
#[schema(examples(498094647, json!(null)))]
|
||||||
vk_id: Option<i32>,
|
pub vk_id: Option<i32>,
|
||||||
|
|
||||||
|
/// Идентификатор привязанного аккаунта Telegram
|
||||||
|
#[schema(examples(996004735, json!(null)))]
|
||||||
|
pub telegram_id: Option<i64>,
|
||||||
|
|
||||||
/// JWT токен доступа
|
/// JWT токен доступа
|
||||||
#[schema(examples(
|
#[schema(examples(
|
||||||
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY"
|
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY"
|
||||||
))]
|
))]
|
||||||
access_token: String,
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub access_token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UserResponse {
|
||||||
|
pub fn from_user_with_token(user: User, access_token: String) -> Self {
|
||||||
|
Self {
|
||||||
|
id: user.id.clone(),
|
||||||
|
username: user.username.clone(),
|
||||||
|
group: user.group.clone(),
|
||||||
|
role: user.role.clone(),
|
||||||
|
vk_id: user.vk_id,
|
||||||
|
telegram_id: user.telegram_id,
|
||||||
|
access_token: Some(access_token),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create UserResponse from User ref.
|
/// Create UserResponse from User ref.
|
||||||
impl From<&User> for UserResponse {
|
impl From<&User> for UserResponse {
|
||||||
fn from(user: &User) -> Self {
|
fn from(user: &User) -> Self {
|
||||||
UserResponse {
|
Self {
|
||||||
id: user.id.clone(),
|
id: user.id.clone(),
|
||||||
username: user.username.clone(),
|
username: user.username.clone(),
|
||||||
group: user.group.clone(),
|
group: user.group.clone(),
|
||||||
role: user.role.clone(),
|
role: user.role.clone(),
|
||||||
vk_id: user.vk_id.clone(),
|
vk_id: user.vk_id,
|
||||||
access_token: user.access_token.clone(),
|
telegram_id: user.telegram_id,
|
||||||
|
access_token: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -161,13 +199,14 @@ pub mod user {
|
|||||||
/// Transform User to UserResponse.
|
/// Transform User to UserResponse.
|
||||||
impl From<User> for UserResponse {
|
impl From<User> for UserResponse {
|
||||||
fn from(user: User) -> Self {
|
fn from(user: User) -> Self {
|
||||||
UserResponse {
|
Self {
|
||||||
id: user.id,
|
id: user.id,
|
||||||
username: user.username,
|
username: user.username,
|
||||||
group: user.group,
|
group: user.group,
|
||||||
role: user.role,
|
role: user.role,
|
||||||
vk_id: user.vk_id,
|
vk_id: user.vk_id,
|
||||||
access_token: user.access_token,
|
telegram_id: user.telegram_id,
|
||||||
|
access_token: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
56
src/routes/users/by.rs
Normal file
56
src/routes/users/by.rs
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
use crate::routes::schema::user::UserResponse;
|
||||||
|
use crate::routes::users::by::schema::{ErrorCode, ServiceResponse};
|
||||||
|
use crate::state::AppState;
|
||||||
|
use actix_web::{get, web};
|
||||||
|
use database::query::Query;
|
||||||
|
|
||||||
|
#[utoipa::path(responses((status = OK, body = UserResponse)))]
|
||||||
|
#[get("/id/{id}")]
|
||||||
|
pub async fn by_id(app_state: web::Data<AppState>, path: web::Path<String>) -> ServiceResponse {
|
||||||
|
let user_id = path.into_inner();
|
||||||
|
|
||||||
|
let db = app_state.get_database();
|
||||||
|
|
||||||
|
match Query::find_user_by_id(db, &user_id).await {
|
||||||
|
Ok(Some(user)) => Ok(UserResponse::from(user)),
|
||||||
|
_ => Err(ErrorCode::NotFound),
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[utoipa::path(responses((status = OK, body = UserResponse)))]
|
||||||
|
#[get("/telegram-id/{id}")]
|
||||||
|
pub async fn by_telegram_id(
|
||||||
|
app_state: web::Data<AppState>,
|
||||||
|
path: web::Path<i64>,
|
||||||
|
) -> ServiceResponse {
|
||||||
|
let telegram_id = path.into_inner();
|
||||||
|
|
||||||
|
let db = app_state.get_database();
|
||||||
|
|
||||||
|
match Query::find_user_by_telegram_id(db, telegram_id).await {
|
||||||
|
Ok(Some(user)) => Ok(UserResponse::from(user)),
|
||||||
|
_ => Err(ErrorCode::NotFound),
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
mod schema {
|
||||||
|
use crate::routes::schema::user::UserResponse;
|
||||||
|
use actix_macros::ErrResponse;
|
||||||
|
use derive_more::Display;
|
||||||
|
use serde::Serialize;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
|
||||||
|
|
||||||
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
#[schema(as = Users::By::ErrorCode)]
|
||||||
|
pub enum ErrorCode {
|
||||||
|
/// User not found.
|
||||||
|
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
||||||
|
#[display("Required user not found.")]
|
||||||
|
NotFound,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,85 +1,69 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::app_state::AppState;
|
use crate::extractors::base::AsyncExtractor;
|
||||||
use crate::database::driver::users::UserSave;
|
use crate::state::AppState;
|
||||||
use crate::database::models::User;
|
|
||||||
use crate::extractors::base::SyncExtractor;
|
|
||||||
use crate::routes::schema::IntoResponseAsError;
|
|
||||||
use crate::utility::mutex::MutexScope;
|
|
||||||
use actix_web::{post, web};
|
use actix_web::{post, web};
|
||||||
|
use database::entity::User;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
|
||||||
|
|
||||||
#[utoipa::path(responses((status = OK)))]
|
#[utoipa::path(responses((status = OK)))]
|
||||||
#[post("/change-group")]
|
#[post("/change-group")]
|
||||||
pub async fn change_group(
|
pub async fn change_group(
|
||||||
app_state: web::Data<AppState>,
|
app_state: web::Data<AppState>,
|
||||||
user: SyncExtractor<User>,
|
user: AsyncExtractor<User>,
|
||||||
data: web::Json<Request>,
|
data: web::Json<Request>,
|
||||||
) -> ServiceResponse {
|
) -> ServiceResponse {
|
||||||
let mut user = user.into_inner();
|
let user = user.into_inner();
|
||||||
|
|
||||||
if user.group == data.group {
|
if user
|
||||||
return ErrorCode::SameGroup.into_response();
|
.group
|
||||||
|
.as_ref()
|
||||||
|
.is_some_and(|group| group.eq(&data.group))
|
||||||
|
{
|
||||||
|
return Ok(()).into();
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(e) = app_state.schedule.scope(|schedule| match schedule {
|
if !app_state
|
||||||
Some(schedule) => {
|
.get_schedule_snapshot("eng_polytechnic")
|
||||||
if schedule.data.groups.contains_key(&data.group) {
|
.await
|
||||||
None
|
.unwrap()
|
||||||
} else {
|
.data
|
||||||
Some(ErrorCode::NotFound)
|
.groups
|
||||||
}
|
.contains_key(&data.group)
|
||||||
}
|
{
|
||||||
None => Some(ErrorCode::NoSchedule),
|
return Err(ErrorCode::NotFound).into();
|
||||||
}) {
|
|
||||||
return e.into_response();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
user.group = data.into_inner().group;
|
let mut active_user = user.clone().into_active_model();
|
||||||
|
active_user.group = Set(Some(data.into_inner().group));
|
||||||
|
|
||||||
if let Some(e) = user.save(&app_state).err() {
|
active_user.update(app_state.get_database()).await.unwrap();
|
||||||
eprintln!("Failed to update user: {e}");
|
|
||||||
return ErrorCode::InternalServerError.into_response();
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(()).into()
|
Ok(()).into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use actix_macros::{IntoResponseErrorNamed, StatusCode};
|
use actix_macros::ErrResponse;
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;
|
pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, ToSchema)]
|
#[derive(Deserialize, ToSchema)]
|
||||||
#[schema(as = ChangeGroup::Request)]
|
#[schema(as = ChangeGroup::Request)]
|
||||||
pub struct Request {
|
pub struct Request {
|
||||||
/// Group name.
|
// Group.
|
||||||
pub group: String,
|
pub group: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[schema(as = ChangeGroup::ErrorCode)]
|
#[schema(as = ChangeGroup::ErrorCode)]
|
||||||
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
|
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// Schedules have not yet been received.
|
|
||||||
#[display("Schedule not parsed yet.")]
|
|
||||||
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
|
|
||||||
NoSchedule,
|
|
||||||
|
|
||||||
/// Passed the same group name that is currently there.
|
|
||||||
#[display("Passed the same group name as it is at the moment.")]
|
|
||||||
SameGroup,
|
|
||||||
|
|
||||||
/// The required group does not exist.
|
/// The required group does not exist.
|
||||||
#[display("The required group does not exist.")]
|
#[display("The required group does not exist.")]
|
||||||
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
|
||||||
NotFound,
|
NotFound,
|
||||||
|
|
||||||
/// Server-side error.
|
|
||||||
#[display("Internal server error.")]
|
|
||||||
#[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
|
|
||||||
InternalServerError,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,41 +1,42 @@
|
|||||||
use self::schema::*;
|
use self::schema::*;
|
||||||
use crate::app_state::AppState;
|
use crate::extractors::base::AsyncExtractor;
|
||||||
use crate::database::driver;
|
use crate::state::AppState;
|
||||||
use crate::database::driver::users::UserSave;
|
|
||||||
use crate::database::models::User;
|
|
||||||
use crate::extractors::base::SyncExtractor;
|
|
||||||
use crate::routes::schema::IntoResponseAsError;
|
|
||||||
use actix_web::{post, web};
|
use actix_web::{post, web};
|
||||||
|
use database::entity::User;
|
||||||
|
use database::query::Query;
|
||||||
|
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
|
||||||
|
|
||||||
#[utoipa::path(responses((status = OK)))]
|
#[utoipa::path(responses((status = OK)))]
|
||||||
#[post("/change-username")]
|
#[post("/change-username")]
|
||||||
pub async fn change_username(
|
pub async fn change_username(
|
||||||
app_state: web::Data<AppState>,
|
app_state: web::Data<AppState>,
|
||||||
user: SyncExtractor<User>,
|
user: AsyncExtractor<User>,
|
||||||
data: web::Json<Request>,
|
data: web::Json<Request>,
|
||||||
) -> ServiceResponse {
|
) -> ServiceResponse {
|
||||||
let mut user = user.into_inner();
|
let user = user.into_inner();
|
||||||
|
|
||||||
if user.username == data.username {
|
if user.username == data.username {
|
||||||
return ErrorCode::SameUsername.into_response();
|
return Ok(()).into();
|
||||||
}
|
}
|
||||||
|
|
||||||
if driver::users::get_by_username(&app_state, &data.username).is_ok() {
|
let db = app_state.get_database();
|
||||||
return ErrorCode::AlreadyExists.into_response();
|
|
||||||
|
if Query::is_user_exists_by_username(db, &data.username)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
{
|
||||||
|
return Err(ErrorCode::AlreadyExists).into();
|
||||||
}
|
}
|
||||||
|
|
||||||
user.username = data.into_inner().username;
|
let mut active_user = user.into_active_model();
|
||||||
|
active_user.username = Set(data.into_inner().username);
|
||||||
if let Some(e) = user.save(&app_state).err() {
|
active_user.update(db).await.unwrap();
|
||||||
eprintln!("Failed to update user: {e}");
|
|
||||||
return ErrorCode::InternalServerError.into_response();
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(()).into()
|
Ok(()).into()
|
||||||
}
|
}
|
||||||
|
|
||||||
mod schema {
|
mod schema {
|
||||||
use actix_macros::{IntoResponseErrorNamed, StatusCode};
|
use actix_macros::ErrResponse;
|
||||||
use derive_more::Display;
|
use derive_more::Display;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
@@ -49,22 +50,13 @@ mod schema {
|
|||||||
pub username: String,
|
pub username: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
|
#[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
|
||||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
#[schema(as = ChangeUsername::ErrorCode)]
|
#[schema(as = ChangeUsername::ErrorCode)]
|
||||||
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
|
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
|
||||||
pub enum ErrorCode {
|
pub enum ErrorCode {
|
||||||
/// The same name that is currently present is passed.
|
|
||||||
#[display("Passed the same name as it is at the moment.")]
|
|
||||||
SameUsername,
|
|
||||||
|
|
||||||
/// A user with this name already exists.
|
/// A user with this name already exists.
|
||||||
#[display("A user with this name already exists.")]
|
#[display("A user with this name already exists.")]
|
||||||
AlreadyExists,
|
AlreadyExists,
|
||||||
|
|
||||||
/// Server-side error.
|
|
||||||
#[display("Internal server error.")]
|
|
||||||
#[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
|
|
||||||
InternalServerError,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user