Compare commits

...

93 Commits

Author SHA1 Message Date
0010cdde83 hddhfgggsasdfa
All checks were successful
Build and push Docker image / build (push) Successful in 7m37s
Build and push Docker image / deploy (push) Successful in 1m4s
2024-09-22 11:37:14 +10:00
26cc4fb093 Switched Docker images on Gitea host
Some checks failed
Build and push Docker image / build (push) Failing after 6m6s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 04:30:26 +10:00
689f44cf73 Use Alpine 3.20 image
Some checks failed
Build and push Docker image / build (push) Failing after 6m49s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:58:50 +10:00
78098a8204 Passchendaele this.
Some checks failed
Build and push Docker image / build (push) Failing after 6m39s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:44:55 +10:00
f458ac6d33 Oops... I hate coding now.
Some checks failed
Build and push Docker image / build (push) Failing after 6m40s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:32:29 +10:00
09450c3304 Ughhhh
Some checks failed
Build and push Docker image / build (push) Failing after 2m18s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:29:12 +10:00
960018f32e Add libssl-dev to fix compiler error
Some checks failed
Build and push Docker image / build (push) Failing after 2m9s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:21:49 +10:00
293da90978 This is a reason why I don't like Python
Some checks failed
Build and push Docker image / build (push) Failing after 2m13s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:18:39 +10:00
5a392f8cd9 Fix missing package manager
Some checks failed
Build and push Docker image / build (push) Failing after 1m2s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:15:12 +10:00
306e55d866 Housekeeping
Some checks failed
Build and push Docker image / build (push) Failing after 1m1s
Build and push Docker image / deploy (push) Has been skipped
2024-09-22 03:03:23 +10:00
cf8caf1aae Remove cache layer
All checks were successful
Build and push container image / build (push) Successful in 12m39s
Build and push container image / deploy (push) Successful in 36s
2024-08-20 11:50:32 +10:00
0713a82dc4 Attempt to fix timing issue
All checks were successful
Build and push container image / build (push) Successful in 12m32s
Build and push container image / deploy (push) Successful in 34s
2024-08-17 10:46:29 +10:00
cb660d13f4 Add debug logs
All checks were successful
Build and push container image / build (push) Successful in 12m40s
Build and push container image / deploy (push) Successful in 34s
2024-08-16 11:42:52 +10:00
dfa1ae75b2 Add IncidentColorMap enum
All checks were successful
Build and push container image / build (push) Successful in 12m31s
Build and push container image / deploy (push) Successful in 36s
2024-08-15 16:16:23 +10:00
1fa75a4d6f Restructure the RSS task
All checks were successful
Build and push container image / build (push) Successful in 12m34s
Build and push container image / deploy (push) Successful in 34s
2024-08-15 13:58:07 +10:00
f9471e471b [skip ci] Optimize the SSH script 2024-08-15 12:58:26 +10:00
4c79f5c2ff Does this work now?
All checks were successful
Build and push container image / build (push) Successful in 14m11s
Build and push container image / deploy (push) Successful in 55s
2024-08-15 12:51:59 +10:00
9dd77f1d54 Makes sense
Some checks failed
Build and push container image / deploy (push) Blocked by required conditions
Build and push container image / build (push) Has been cancelled
revert Update cache location
2024-08-14 22:48:08 -04:00
f2a80d7389 Curious if this works
Some checks failed
Build and push container image / build (push) Failing after 12m51s
Build and push container image / deploy (push) Has been skipped
2024-08-15 12:32:05 +10:00
9ec676fb8d Update cache location
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-15 12:22:06 +10:00
e368b6bbdc [skip ci] Update OS tag 2024-08-15 11:57:28 +10:00
0b0e7304c1 Fix embed colors and increase limit
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-15 11:35:25 +10:00
7c368404e4 Add GitHub RSS feed
All checks were successful
Build and push container image / build (push) Successful in 12m39s
2024-08-15 10:51:17 +10:00
025a65eefb Remove unused crate
All checks were successful
Build and push container image / build (push) Successful in 12m20s
2024-08-10 08:27:30 +10:00
aa62dfca29 [no ci] Update Alpine version for Postgres 2024-08-10 08:23:24 +10:00
2f5687fa2a Add EventService to gather System Health info
All checks were successful
Build and push container image / build (push) Successful in 12m35s
2024-08-09 11:18:26 +10:00
e64287a8c4 I can't read, it seems.
All checks were successful
Build and push container image / build (push) Successful in 12m57s
2024-08-08 17:11:54 +10:00
f7fb8fcf60 Oops.
Some checks failed
Build and push container image / build (push) Failing after 11m58s
2024-08-08 16:58:47 +10:00
b54db60ce4 Add commit hash for versioning
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-08 16:56:40 +10:00
e519407557 Remove excessive logs for 'no new content'
All checks were successful
Build and push container image / build (push) Successful in 13m34s
2024-08-08 15:35:54 +10:00
eb5944754e Improvements to Dockerfile
All checks were successful
Build and push container image / build (push) Successful in 13m27s
2024-08-03 13:16:22 +10:00
ec1aa9db0a [no ci] Update controllers folder 2024-08-03 12:02:54 +10:00
048ce583e8 Block scheduler if another one is already running
All checks were successful
Build and push container image / build (push) Successful in 12m40s
2024-08-02 21:05:29 +10:00
483ba390e9 Bump version
All checks were successful
Build and push container image / build (push) Successful in 12m49s
2024-08-02 13:36:42 +10:00
246cb301c4 Allow GPortal RSS to update message 2024-08-02 13:33:46 +10:00
dd008c24b6 Pack logs into one message efficiently 2024-08-02 12:19:53 +10:00
f078eff53e Regret turning it back on
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-01 20:29:04 -04:00
ddf605bd29 Update Rust crate serde_json to v1.0.122 (#118)
All checks were successful
Build and push container image / build (push) Successful in 12m53s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 17:47:31 -04:00
79217dc818 Update Rust crate sysinfo to v0.31.2 (#117)
All checks were successful
Build and push container image / build (push) Successful in 12m50s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 11:04:03 -04:00
15b629b497 Update Rust crate bb8-redis to 0.16.0 (#116)
All checks were successful
Build and push container image / build (push) Successful in 12m53s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 10:16:35 -04:00
e439e8b2c2 Update Rust crate sysinfo to v0.31.1 (#115)
All checks were successful
Build and push container image / build (push) Successful in 12m55s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 09:33:32 -04:00
c3e7a669ac Forward RSS logs to the Discord channel
All checks were successful
Build and push container image / build (push) Successful in 13m30s
2024-08-01 19:06:39 +10:00
09fccc1328 Update docker/build-push-action action to v6 (#114)
All checks were successful
Build and push container image / build (push) Successful in 14m32s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:48:37 -04:00
06bdb8b105 Update docker/build-push-action action to v5.4.0 (#113)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:47:54 -04:00
41e4ea633f Update actions/checkout action to v4.1.7 (#112)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:47:39 -04:00
cd32c9198b Update docker/setup-buildx-action digest to 988b5a0 (#111)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:46:45 -04:00
c1298d23ad Update docker/login-action digest to 9780b0c (#110)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:45:16 -04:00
ea45ece2f9 Pin dependencies (#109)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-08-01 00:44:48 -04:00
e88dfcc92a Turn Renovate back on
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-01 14:44:29 +10:00
4d9253323d Put Soundfont file onto LFS
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-08-01 14:35:12 +10:00
1ed1926697 Add more iLO data 2024-07-31 12:21:26 +10:00
5077bad918 Use defer in reply 2024-07-30 19:25:51 +10:00
ed719aaf4f Add iLO command 2024-07-30 19:00:31 +10:00
9da767866a Add bold markdown to the Rust blog message 2024-07-27 23:44:17 -04:00
cf96469579 [no ci] It got caught by the builder 2024-07-28 10:42:42 +10:00
c95a5d7f49 Add RSS implementation 2024-07-28 09:54:20 +10:00
1e557d25c0 Use the /etc/os-release instead for OS info 2024-07-25 15:42:10 +10:00
ce4d27f917 Update midi.rs 2024-07-25 15:39:15 +10:00
d9bb501c1e Add conversion command for midi files 2024-07-22 16:41:20 +10:00
13c83591d7 0.3.0 Rewrite
All checks were successful
Build and push container image / build (push) Successful in 11m49s
2024-07-20 09:56:58 +10:00
6c4ef8baeb Lockfile maintenance
All checks were successful
Build and push container image / build (push) Successful in 12m7s
2024-05-09 21:09:56 +10:00
7486990585 Update actions/checkout action to v4.1.5 (#108)
All checks were successful
Build and push container image / build (push) Successful in 12m20s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-08 16:03:12 -04:00
fa705421a1 Update Rust crate serde_json to 1.0.117 (#107)
All checks were successful
Build and push container image / build (push) Successful in 12m4s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-07 20:32:18 -04:00
390e3a7c8d Update Rust crate serde to 1.0.201 (#106)
Some checks failed
Build and push container image / build (push) Has been cancelled
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-07 20:31:48 -04:00
5e29623d9d Update Rust crate sysinfo to 0.30.12 (#105)
All checks were successful
Build and push container image / build (push) Successful in 12m10s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-05 17:42:45 -04:00
927b74ac7e Update rust:1.78-alpine3.19 Docker digest to d4d3f81 (#104)
All checks were successful
Build and push container image / build (push) Successful in 12m4s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-02 19:52:58 -04:00
033cc4dbab Update Dockerfile
All checks were successful
Build and push container image / build (push) Successful in 12m10s
2024-05-02 19:21:22 -04:00
7f5b074af4 Fix uptime again
All checks were successful
Build and push container image / build (push) Successful in 12m22s
2024-05-03 06:38:53 +10:00
55297b4360 Pin rust Docker tag to 9b74675 (#103)
All checks were successful
Build and push container image / build (push) Successful in 12m45s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-02 16:16:30 -04:00
2ac696956b Implement cargo-chef
Some checks failed
Build and push container image / build (push) Has been cancelled
2024-05-03 06:15:46 +10:00
b9dbe2657c Update rust Docker tag to v1.78 (#102)
Some checks failed
Build and push container image / build (push) Failing after 42s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-02 13:49:50 -04:00
5ef1ae0e23 Update Rust crate serde to 1.0.200 (#101)
All checks were successful
Build and push container image / build (push) Successful in 11m0s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-05-01 12:09:20 -04:00
d9598f89f8 Update Rust crate serde to 1.0.199 (#100)
All checks were successful
Build and push container image / build (push) Successful in 10m56s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-27 00:36:26 -04:00
d39550feea Fix poisoned uptime command
All checks were successful
Build and push container image / build (push) Successful in 10m50s
2024-04-26 20:13:58 +10:00
9e04951265 Lock file maintenance
All checks were successful
Build and push container image / build (push) Successful in 10m36s
2024-04-26 15:12:50 +10:00
af8481ea74 Update Rust crate cargo_toml to 0.20.2 (#99)
All checks were successful
Build and push container image / build (push) Successful in 10m55s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-25 22:12:52 -04:00
5dc5fc2ed0 Update actions/checkout action to v4.1.4 (#98)
All checks were successful
Build and push container image / build (push) Successful in 11m2s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-25 08:49:56 -04:00
c7bb24c4ed Update Rust crate cargo_toml to 0.20.1 (#97)
All checks were successful
Build and push container image / build (push) Successful in 11m14s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-23 21:00:45 -04:00
b10808b1cf Update actions/checkout action to v4.1.3 (#96)
All checks were successful
Build and push container image / build (push) Successful in 11m7s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-22 10:34:55 -04:00
970777cdc4 Update Rust crate reqwest to 0.12.4 (#95)
All checks were successful
Build and push container image / build (push) Successful in 10m56s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-19 13:47:18 -04:00
ef0ad60e01 Update Rust crate cargo_toml to 0.20.0 (#94)
All checks were successful
Build and push container image / build (push) Successful in 10m51s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-19 12:28:05 -04:00
69aed61119 Update postgres:16.2-alpine3.19 Docker digest to 951bfda (#93)
All checks were successful
Build and push container image / build (push) Successful in 10m55s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-18 19:23:53 -04:00
dd5934ea84 Update rust:1.77-alpine3.19 Docker digest to 9b74675 (#92)
All checks were successful
Build and push container image / build (push) Successful in 10m45s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-18 16:50:32 -04:00
f06e9a9a07 Update postgres:16.2-alpine3.19 Docker digest to 20baeb0 (#91)
All checks were successful
Build and push container image / build (push) Successful in 10m37s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-18 16:22:29 -04:00
fb0afb11ab Update Rust crate sysinfo to 0.30.11 (#90)
All checks were successful
Build and push container image / build (push) Successful in 10m54s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-18 11:51:44 -04:00
6298efe4ad Update Rust crate serde to 1.0.198 (#89)
All checks were successful
Build and push container image / build (push) Successful in 10m56s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-16 17:21:56 -04:00
09cc5dab0c Update Rust crate serde_json to 1.0.116 (#88)
All checks were successful
Build and push container image / build (push) Successful in 10m52s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-16 01:16:29 -04:00
c6b4c00d64 Update Rust crate sysinfo to 0.30.10 (#87)
All checks were successful
Build and push container image / build (push) Successful in 10m28s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-10 10:59:50 -04:00
c814e2079a Update rust:1.77-alpine3.19 Docker digest to b6ea81b (#86)
All checks were successful
Build and push container image / build (push) Successful in 10m38s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-10 07:53:50 -04:00
c88e215b82 Update rust:1.77-alpine3.19 Docker digest to 59aa190 (#85)
All checks were successful
Build and push container image / build (push) Successful in 10m30s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-09 22:52:14 -04:00
ee016c419f Update .gitea/workflows/build.yml
All checks were successful
Build and push container image / build (push) Successful in 10m43s
2024-04-08 18:08:37 -04:00
9c4373e6b9 Update Rust crate sysinfo to 0.30.9 (#84)
All checks were successful
Build and push container image / build (push) Successful in 10m35s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-08 14:19:32 -04:00
6198fa5d4a Update docker/setup-buildx-action digest to d70bba7 (#83)
All checks were successful
Build and push container image / build (push) Successful in 10m32s
Co-authored-by: Renovate <system@toast-server.net>
Co-committed-by: Renovate <system@toast-server.net>
2024-04-08 04:15:55 -04:00
41 changed files with 2406 additions and 1121 deletions

2
.cargo/config.toml Normal file
View File

@ -0,0 +1,2 @@
[registries.gitea]
index = "sparse+https://git.toast-server.net/api/packages/toast/cargo/"

View File

@ -1,7 +1,9 @@
.vscode .vscode
target .cargo
.env .env
.gitignore .gitignore
.gitattributes
docker-compose.yml docker-compose.yml
Dockerfile
renovate.json renovate.json
run.sh run.sh

1
.gitattributes vendored Normal file
View File

@ -0,0 +1 @@
src/internals/assets/FluidR3_GM.sf2 filter=lfs diff=lfs merge=lfs -text

View File

@ -1,48 +1,91 @@
name: Build and push container image name: Build and push Docker image
on: on:
push: push:
branches: branches:
- master - master
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs: jobs:
build: build:
runs-on: ubuntu-22.04-node runs-on: ubuntu-22.04
steps: env:
- name: Set up Docker environment RUNNER_TOOL_CACHE: /toolcache
run: |
apt update && apt upgrade -y && apt install -y apt-transport-https ca-certificates curl zstd gnupg lsb-release
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
"deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
$(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
apt update && apt install -y docker-ce docker-ce-cli containerd.io
- name: Checkout branch steps:
uses: https://github.com/actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Install zstd
run: sudo apt-get update && sudo apt-get install -y zstd
- name: Generate cache key
id: cargo-cache-key
run: |
find ./Cargo.lock -type f -exec sha256sum {} + | sha256sum > hash.txt
cat hash.txt
- name: Cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
id: cache
with:
save-always: true
path: |
$HOME/.cargo/bin/
$HOME/.cargo/git/
$HOME/.cargo/registry/index/
$HOME/.cargo/registry/cache/
target/
key: ${{ runner.os }}-cache-${{ steps.cargo-cache-key.outputs.hash }}
- name: Login to Gitea - name: Login to Gitea
uses: https://github.com/docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3 uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with: with:
registry: git.toast-server.net registry: git.toast-server.net
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }} password: ${{ secrets.DOCKER_TOKEN }}
- name: Set up Docker Buildx - name: Set up Rust toolchain
uses: https://github.com/docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20 # v3 uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0
with:
toolchain: stable
rustflags: -C target-feature=-crt-static
- name: Install zigbuild
run: |
pip3 install ziglang
cargo install --locked cargo-zigbuild
- name: Compile
run: |
rustup target add x86_64-unknown-linux-musl
export GIT_COMMIT_HASH=${{ github.sha }} && \
export GIT_COMMIT_BRANCH=${{ github.ref_name }} && \
cargo zigbuild --target x86_64-unknown-linux-musl --locked -rF production
- name: Build and push image - name: Build and push image
uses: https://github.com/docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0 uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
with: with:
context: . context: .
platforms: linux/amd64 platforms: linux/amd64
push: true push: true
tags: git.toast-server.net/toast/kon:main tags: git.toast-server.net/toast/kon:master
build-args: | cache-from: type=registry,ref=git.toast-server.net/toast/kon:cache
CARGO_TOKEN=${{ secrets.CARGO_TOKEN }} cache-to: type=registry,ref=git.toast-server.net/toast/kon:cache,mode=max,image-manifest=true,oci-mediatypes=true
- name: Update Delivery
uses: https://github.com/appleboy/ssh-action@029f5b4aeeeb58fdfe1410a5d17f967dacf36262 # v1.0.3 deploy:
runs-on: host
needs: build
steps:
- name: Deploy update
uses: appleboy/ssh-action@d732991ab09097d8c8f390d91385b0386e619598 # v1.0.3
with: with:
host: ${{ secrets.SSH_HOST }} host: ${{ secrets.SSH_HOST }}
username: ${{ secrets.SSH_USERNAME }} username: ${{ secrets.SSH_USERNAME }}
@ -50,6 +93,6 @@ jobs:
key: ${{ secrets.SSH_KEY }} key: ${{ secrets.SSH_KEY }}
port: ${{ secrets.SSH_PORT }} port: ${{ secrets.SSH_PORT }}
script: | script: |
cd kon && docker compose pull && \ cd kon && docker compose pull bot && \
docker compose down --remove-orphans && docker compose up -d && \ docker compose up -d bot --force-recreate && \
docker image prune -f && docker system prune -f docker system prune -f

4
.gitignore vendored
View File

@ -1,2 +1,6 @@
target target
.env .env
*.log
# Local Gitea Actions
act

11
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,11 @@
{
"recommendations": [
"fill-labs.dependi",
"usernamehw.errorlens",
"tamasfe.even-better-toml",
"GitHub.vscode-pull-request-github",
"rust-lang.rust-analyzer",
"redhat.vscode-yaml",
"sumneko.lua"
]
}

View File

@ -1,6 +1,4 @@
{ {
"rust-analyzer.linkedProjects": [ "rust-analyzer.showUnlinkedFileNotification": false,
"./Cargo.toml" "rust-analyzer.linkedProjects": ["./Cargo.toml"]
],
"rust-analyzer.showUnlinkedFileNotification": false
} }

1208
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,21 +1,28 @@
[package] [package]
name = "kon" name = "kon"
version = "0.2.7" version = "0.3.20"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
cargo_toml = "0.19.2" bb8 = "0.8.5"
gamedig = "0.5.0" bb8-postgres = "0.8.1"
bb8-redis = "0.17.0"
cargo_toml = "0.20.4"
feed-rs = "2.1.0"
once_cell = "1.19.0" once_cell = "1.19.0"
poise = "0.6.1" poise = "0.6.1"
reqwest = { version = "0.12.3", features = ["json"] } regex = "1.10.6"
serde = "1.0.197" reqwest = { version = "0.12.7", features = ["json", "native-tls-vendored"] }
serde_json = "1.0.115" serde = "1.0.210"
sysinfo = "0.30.8" serde_json = "1.0.128"
tokenservice-client = { version = "0.2.0", registry = "gitea" } sysinfo = "0.31.4"
tokio = { version = "1.37.0", features = ["macros", "signal", "rt-multi-thread"] } tokenservice-client = { version = "0.4.0", registry = "gitea" }
tokio-postgres = "0.7.10" tokio = { version = "1.40.0", features = ["macros", "signal", "rt-multi-thread"] }
uptime_lib = "0.3.0" tokio-postgres = "0.7.12"
uptime_lib = "0.3.1"
[features]
production = []
[[bin]] [[bin]]
name = "kon" name = "kon"

View File

@ -1,16 +1,10 @@
FROM rust:1.77-alpine3.19@sha256:d4c2b0a1544462f40b6179aedff4f5485a019a213907c8590ed77d1b6145a29c AS compiler FROM scratch AS base
ENV RUSTFLAGS="-C target-feature=-crt-static" WORKDIR /builder
ARG CARGO_TOKEN
RUN apk add --no-cache openssl-dev musl-dev
WORKDIR /usr/src/kon
COPY . . COPY . .
RUN mkdir -p .cargo && \
printf '[registries.gitea]\nindex = "sparse+https://git.toast-server.net/api/packages/toast/cargo/"\ntoken = "Bearer %s"\n' "$CARGO_TOKEN" >> .cargo/config.toml
RUN cargo fetch && cargo build -r
FROM alpine:3.19@sha256:c5b1261d6d3e43071626931fc004f70149baeba2c8ec672bd4f27761f8e1ad6b FROM alpine:3.20
RUN apk add --no-cache openssl-dev libgcc LABEL org.opencontainers.image.source="https://git.toast-server.net/toast/Kon"
RUN apk add --no-cache libgcc fluidsynth
WORKDIR /kon WORKDIR /kon
COPY --from=compiler /usr/src/kon/target/release/kon . COPY --from=base /builder/target/x86_64-unknown-linux-musl/release/kon .
COPY --from=compiler /usr/src/kon/Cargo.toml .
CMD [ "./kon" ] CMD [ "./kon" ]

26
build.rs Normal file
View File

@ -0,0 +1,26 @@
fn main() {
#[cfg(feature = "production")]
{
if let Ok(git_commit_hash) = std::env::var("GIT_COMMIT_HASH") {
println!("cargo:rustc-env=GIT_COMMIT_HASH={}", &git_commit_hash[..7]);
} else {
println!("cargo:warning=GIT_COMMIT_HASH not found");
println!("cargo:rustc-env=GIT_COMMIT_HASH=no_env_set");
}
}
{
let git_branch = std::process::Command::new("git")
.args(&["rev-parse", "--abbrev-ref", "HEAD"])
.output()
.expect("Command execution failed");
if git_branch.status.success() {
let git_branch = String::from_utf8(git_branch.stdout).expect("Invalid UTF-8 sequence").trim().to_string();
println!("cargo:rustc-env=GIT_COMMIT_BRANCH={}", &git_branch);
} else {
println!("cargo:warning=GIT_COMMIT_BRANCH not found");
println!("cargo:rustc-env=GIT_COMMIT_BRANCH=no_env_set");
}
}
}

View File

@ -1,22 +1,14 @@
services: services:
bot: bot:
container_name: kon container_name: kon
#image: 'git.toast-server.net/toast/kon:main' #image: git.toast-server.net/toast/kon:master
build: . build: .
env_file:
- .env
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
- db - cache
db: cache:
container_name: kon-database container_name: kon-redis
image: postgres:16.2-alpine3.19@sha256:1d74239810c19ed0dbb317acae40974e673865b2d565a0d369e206159d483957 image: redis/redis-stack-server:7.4.0-v0
restart: unless-stopped restart: unless-stopped
ports: ports:
- 37930:5432/tcp - 37935:6379/tcp
volumes:
- /var/lib/docker/volumes/kon-database:/var/lib/postgresql/data:rw
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_DB}

View File

@ -18,6 +18,7 @@
"branchTopic": "{{{datasource}}}-{{{depName}}}-vulnerability", "branchTopic": "{{{datasource}}}-{{{depName}}}-vulnerability",
"prCreation": "immediate" "prCreation": "immediate"
}, },
"enabled": false,
"pinDigests": true, "pinDigests": true,
"ignoreTests": true, "ignoreTests": true,
"pruneStaleBranches": true, "pruneStaleBranches": true,
@ -25,9 +26,7 @@
"automerge": true, "automerge": true,
"automergeType": "pr", "automergeType": "pr",
"automergeStrategy": "squash", "automergeStrategy": "squash",
"automergeSchedule": [ "automergeSchedule": ["at any time"],
"at any time"
],
"packageRules": [ "packageRules": [
{ {
"matchManagers": ["cargo"], "matchManagers": ["cargo"],

2
run.sh
View File

@ -1,3 +1,3 @@
#!/bin/bash #!/bin/bash
export $(grep -v '^#' .env | xargs) && cargo run kon_dev clear && cargo run kon_dev

18
src/commands.rs Normal file
View File

@ -0,0 +1,18 @@
use crate::Error;
pub mod ilo;
pub mod midi;
pub mod ping;
pub mod status;
pub mod uptime;
/// Deploy the commands globally or in a guild
#[poise::command(
prefix_command,
owners_only,
guild_only
)]
pub async fn deploy(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
poise::builtins::register_application_commands_buttons(ctx).await?;
Ok(())
}

View File

@ -1,265 +0,0 @@
use crate::{
Error,
internals::utils::EMBED_COLOR,
models::gameservers::Gameservers
};
use poise::serenity_prelude::{
futures::{
stream::iter,
future::ready,
Stream,
StreamExt
},
builder::CreateActionRow,
builder::CreateEmbed,
};
use poise::{
CreateReply,
serenity_prelude,
serenity_prelude::ButtonStyle,
ChoiceParameter
};
#[derive(Debug, ChoiceParameter)]
enum GameNames {
#[name = "Minecraft"]
Minecraft
}
/// Manage the game servers for this guild
#[poise::command(
slash_command,
subcommands("add", "remove", "update", "list"),
subcommand_required,
guild_only,
default_member_permissions = "MANAGE_GUILD",
required_permissions = "MANAGE_GUILD" // No clue if this is needed or not. Just leaving it here for now
)]
pub async fn gameserver(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
Ok(())
}
/// Add a game server to the database
#[poise::command(slash_command)]
pub async fn add(
  ctx: poise::Context<'_, (), Error>,
  #[description = "Server name as shown in-game or friendly name"] server_name: String,
  #[description = "Which game is this server running?"] game_name: GameNames,
  #[description = "IP address/domain of the server (Include the port if it has one, e.g 127.0.0.1:8080)"] ip_address: String
) -> Result<(), Error> {
  // Yes/No confirmation buttons shown under the preview embed.
  let action_row = CreateActionRow::Buttons(vec![
    serenity_prelude::CreateButton::new("add-confirm")
      .style(ButtonStyle::Success)
      .label("Yes"),
    serenity_prelude::CreateButton::new("add-cancel")
      .style(ButtonStyle::Danger)
      .label("No")
  ]);

  // Preview embed echoing the input back to the user for confirmation.
  let reply = CreateReply::default()
    .embed(CreateEmbed::new()
      .title("Does this look correct?")
      .description(format!("
**Server name:** `{}`
**Game name:** `{}`
**IP Address:** `{}`
", server_name, game_name.name(), ip_address))
      .color(EMBED_COLOR)
    )
    .components(vec![action_row]);

  ctx.send(reply).await?;

  // Wait (up to 30s per interaction) for a button press from the invoking
  // user in this guild; the loop ends once the collector times out.
  while let Some(collector) = serenity_prelude::ComponentInteractionCollector::new(ctx)
    .guild_id(ctx.guild_id().unwrap())
    .author_id(ctx.author().id)
    .timeout(std::time::Duration::from_secs(30))
    .await
  {
    if collector.data.custom_id == "add-confirm" {
      let result = Gameservers::add_server(
        ctx.guild_id().unwrap().into(),
        server_name.as_str(),
        game_name.name(),
        ip_address.as_str()
      ).await;

      // Edit the original preview message in place, dropping embed + buttons.
      let mut msg = collector.message.clone();

      match result {
        Ok(_) => {
          msg.edit(
            ctx,
            serenity_prelude::EditMessage::new()
              .content("*Confirmed, added the server to database*")
              .embeds(Vec::new())
              .components(Vec::new())
          ).await?;
        },
        Err(y) => {
          msg.edit(
            ctx,
            serenity_prelude::EditMessage::new()
              .content(format!("*Error adding server to database:\n`{}`*", y))
              .embeds(Vec::new())
              .components(Vec::new())
          ).await?;
        }
      }
    } else if collector.data.custom_id == "add-cancel" {
      let mut msg = collector.message.clone();
      msg.edit(
        ctx,
        serenity_prelude::EditMessage::new()
          .content("*Command cancelled*")
          .embeds(Vec::new())
          .components(Vec::new())
      ).await?;
    }
  }

  Ok(())
}
/// Remove a game server from the database
#[poise::command(slash_command)]
pub async fn remove(
  ctx: poise::Context<'_, (), Error>,
  #[description = "Server name"] #[autocomplete = "ac_server_name"] server_name: String
) -> Result<(), Error> {
  // Confirmation embed with Yes/No buttons before anything is deleted.
  let reply = CreateReply::default()
    .embed(CreateEmbed::new()
      .title("Are you sure you want to remove this server?")
      .description(format!("**Server name:** `{}`", server_name))
      .color(EMBED_COLOR)
    )
    .components(vec![
      CreateActionRow::Buttons(vec![
        serenity_prelude::CreateButton::new("delete-confirm")
          .style(ButtonStyle::Success)
          .label("Yes"),
        serenity_prelude::CreateButton::new("delete-cancel")
          .style(ButtonStyle::Danger)
          .label("No")
      ])
    ]);

  ctx.send(reply).await?;

  // Wait (up to 30s per interaction) for a button press from the invoking
  // user in this guild; the loop ends once the collector times out.
  while let Some(collector) = serenity_prelude::ComponentInteractionCollector::new(ctx)
    .guild_id(ctx.guild_id().unwrap())
    .author_id(ctx.author().id)
    .timeout(std::time::Duration::from_secs(30))
    .await
  {
    if collector.data.custom_id == "delete-confirm" {
      let result = Gameservers::remove_server(ctx.guild_id().unwrap().into(), server_name.as_str()).await;

      // Edit the original confirmation message in place, dropping embed + buttons.
      let mut msg = collector.message.clone();

      match result {
        Ok(_) => {
          msg.edit(
            ctx,
            serenity_prelude::EditMessage::new()
              .content("*Confirmed, removed the server from database*")
              .embeds(Vec::new())
              .components(Vec::new())
          ).await?;
        },
        Err(y) => {
          msg.edit(
            ctx,
            serenity_prelude::EditMessage::new()
              .content(format!("*Error removing server from database:\n`{}`*", y))
              .embeds(Vec::new())
              .components(Vec::new())
          ).await?;
        }
      }
    } else if collector.data.custom_id == "delete-cancel" {
      let mut msg = collector.message.clone();
      msg.edit(
        ctx,
        serenity_prelude::EditMessage::new()
          .content("*Command cancelled*")
          .embeds(Vec::new())
          .components(Vec::new())
      ).await?;
    }
  }

  Ok(())
}
/// Update a game server in the database
#[poise::command(slash_command)]
pub async fn update(
  ctx: poise::Context<'_, (), Error>,
  #[description = "Server name"] #[autocomplete = "ac_server_name"] server_name: String,
  #[description = "Game name"] game_name: GameNames,
  #[description = "IP address"] ip_address: String
) -> Result<(), Error> {
  // Apply the update against the gameservers table for this guild.
  let outcome = Gameservers::update_server(
    ctx.guild_id().unwrap().into(),
    &server_name,
    &game_name.name(),
    &ip_address
  ).await;

  // Report the result in a single reply, success or failure alike.
  let feedback = match outcome {
    Ok(_) => "Updated the server in database.".to_string(),
    Err(y) => format!("Error updating the server in database: {:?}", y)
  };
  ctx.send(CreateReply::default().content(feedback)).await?;

  Ok(())
}
/// List all the available game servers for this guild
#[poise::command(slash_command)]
pub async fn list(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  let servers = Gameservers::list_servers(ctx.guild_id().unwrap().into()).await?;

  // One inline embed field per registered server.
  let embed_fields: Vec<(String, String, bool)> = servers
    .into_iter()
    .map(|s| (s.server_name, format!("Game: `{}`\nIP: `{}`", s.game_name, s.ip_address), true))
    .collect();

  ctx.send(CreateReply::default()
    .embed(CreateEmbed::new()
      .title("List of registered gameservers")
      .fields(embed_fields)
      .color(EMBED_COLOR)
    )
  ).await?;

  Ok(())
}
/// Autocomplete provider for `server_name` options.
///
/// Fetches every registered server name for the current guild and streams the
/// ones starting with the partially-typed input. DB errors are logged and
/// treated as "no suggestions" so autocomplete never hard-fails the command.
pub async fn ac_server_name<'a>(
  ctx: poise::Context<'_, (), Error>,
  partial: &'a str
) -> impl Stream<Item = String> + 'a {
  let names = match Gameservers::get_server_names(ctx.guild_id().unwrap().into()).await {
    Ok(names_vector) => names_vector,
    Err(y) => {
      println!("Error retrieving server names: {:?}", y);
      Vec::new()
    }
  };

  // `names` already owns `String`s, so filtering alone suffices; the previous
  // `.map(|n| n.to_string())` cloned every surviving item for no reason.
  iter(names).filter(move |server_name| ready(server_name.starts_with(partial)))
}

319
src/commands/ilo.rs Normal file
View File

@ -0,0 +1,319 @@
use crate::{
Error,
internals::{
config::BINARY_PROPERTIES,
utils::token_path
}
};
use reqwest::{
ClientBuilder,
Error as ReqError
};
use serde::{
Serialize,
Deserialize
};
use poise::{
CreateReply,
serenity_prelude::{
CreateEmbed,
Timestamp
}
};
/// Thermal payload from the Redfish `Chassis/1/Thermal` endpoint.
#[derive(Serialize, Deserialize)]
struct Chassis {
  #[serde(rename = "Fans")]
  fans: Vec<Fan>,
  #[serde(rename = "Temperatures")]
  temperatures: Vec<Temperature>
}

/// A single fan entry from the thermal payload.
#[derive(Serialize, Deserialize)]
struct Fan {
  // Raw reading; rendered as a percentage by the `temperature` command.
  #[serde(rename = "CurrentReading")]
  current_reading: i32,
  #[serde(rename = "FanName")]
  fan_name: String,
  #[serde(rename = "Status")]
  status: Status,
}

/// A single temperature sensor entry from the thermal payload.
#[derive(Serialize, Deserialize)]
struct Temperature {
  #[serde(rename = "CurrentReading")]
  current_reading: i32,
  // Firmware sensor ID, e.g. "01-Inlet Ambient".
  #[serde(rename = "Name")]
  name: String,
  #[serde(rename = "ReadingCelsius")]
  reading_celsius: i32,
  #[serde(rename = "Status")]
  status: Status,
  #[serde(rename = "Units")]
  units: String,
  #[serde(rename = "UpperThresholdCritical")]
  upper_threshold_critical: i32,
  #[serde(rename = "UpperThresholdFatal")]
  upper_threshold_fatal: i32
}

/// Health/state pair shared across Redfish resources.
#[derive(Serialize, Deserialize)]
struct Status {
  // `Health` can be absent in some payloads, hence the `Option`.
  #[serde(rename = "Health")]
  health: Option<String>,
  #[serde(rename = "State")]
  state: String
}
/// Power payload from the Redfish `Chassis/1/Power` endpoint.
#[derive(Serialize, Deserialize, Debug)]
struct Power {
  #[serde(rename = "PowerCapacityWatts")]
  power_capacity_watts: i32,
  #[serde(rename = "PowerConsumedWatts")]
  power_consumed_watts: i32,
  #[serde(rename = "PowerMetrics")]
  power_metrics: PowerMetrics
}

/// Aggregated consumption metrics nested inside [`Power`].
#[derive(Serialize, Deserialize, Debug)]
struct PowerMetrics {
  #[serde(rename = "AverageConsumedWatts")]
  average_consumed_watts: i32,
  #[serde(rename = "MaxConsumedWatts")]
  max_consumed_watts: i32,
  #[serde(rename = "MinConsumedWatts")]
  min_consumed_watts: i32
}
/// System payload from the Redfish `Systems/1` endpoint.
#[derive(Serialize, Deserialize)]
struct System {
  #[serde(rename = "Memory")]
  memory: Memory,
  #[serde(rename = "Model")]
  model: String,
  // HP-specific vendor extension block.
  #[serde(rename = "Oem")]
  oem: Oem,
  #[serde(rename = "PowerState")]
  power_state: String,
  #[serde(rename = "ProcessorSummary")]
  processor_summary: ProcessorSummary
}

/// Installed memory summary nested inside [`System`].
#[derive(Serialize, Deserialize)]
struct Memory {
  #[serde(rename = "TotalSystemMemoryGB")]
  total_system_memory: i32
}

/// CPU summary nested inside [`System`].
#[derive(Serialize, Deserialize)]
struct ProcessorSummary {
  // Number of physical processors installed.
  #[serde(rename = "Count")]
  count: i32,
  #[serde(rename = "Model")]
  cpu: String
}

/// OEM wrapper; HP firmware nests its extensions under `Oem.Hp`.
#[derive(Serialize, Deserialize)]
struct Oem {
  #[serde(rename = "Hp")]
  hp: Hp
}

/// HP vendor extension data.
#[derive(Serialize, Deserialize)]
struct Hp {
  // e.g. "FinishedPost", "InPost", "PowerOff" (mapped in the `system` command).
  #[serde(rename = "PostState")]
  post_state: String
}

/// Payload from the Redfish `EventService` endpoint; only the status is used.
#[derive(Serialize, Deserialize)]
struct Event {
  #[serde(rename = "Status")]
  status: Status
}
/// Display name of the monitored iLO host.
const ILO_HOSTNAME: &str = "POMNI";

/// Redfish API endpoints exposed by the iLO4 interface.
enum RedfishEndpoint {
  Thermal,
  Power,
  System,
  EventService
}

impl RedfishEndpoint {
  /// Path of this endpoint relative to `/redfish/v1/`.
  fn url(&self) -> String {
    let path = match self {
      Self::Thermal => "Chassis/1/Thermal",
      Self::Power => "Chassis/1/Power",
      Self::System => "Systems/1",
      Self::EventService => "EventService"
    };
    path.to_string()
  }
}
/// Fetch and deserialize data from one Redfish endpoint on the iLO.
///
/// Returns the parsed payload type-erased as `Box<dyn Any + Send>`; callers
/// downcast to the struct matching the endpoint they requested.
///
/// # Errors
/// Propagates any `reqwest` error from client construction, the request
/// itself, or JSON deserialization. (These paths previously panicked via
/// `unwrap` even though the signature already returned `Result`.)
async fn ilo_data(endpoint: RedfishEndpoint) -> Result<Box<dyn std::any::Any + Send>, ReqError> {
  // The iLO serves a self-signed certificate, so TLS verification is disabled.
  let client = ClientBuilder::new()
    .danger_accept_invalid_certs(true)
    .build()?;

  let res = client
    .get(format!("https://{}/redfish/v1/{}", token_path().await.ilo_ip, endpoint.url()))
    .basic_auth(token_path().await.ilo_user, Some(token_path().await.ilo_pw))
    .send()
    .await?;

  // Deserialize into the concrete type matching the requested endpoint.
  match endpoint {
    RedfishEndpoint::Thermal => Ok(Box::new(res.json::<Chassis>().await?)),
    RedfishEndpoint::Power => Ok(Box::new(res.json::<Power>().await?)),
    RedfishEndpoint::System => Ok(Box::new(res.json::<System>().await?)),
    RedfishEndpoint::EventService => Ok(Box::new(res.json::<Event>().await?))
  }
}
/// Retrieve data from the HP iLO4 interface
#[poise::command(
  slash_command,
  subcommands("temperature", "power", "system")
)]
// Parent command: it only anchors its subcommands and is never invoked
// directly, hence the empty body.
pub async fn ilo(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
  Ok(())
}
/// Retrieve the server's temperature data
#[poise::command(slash_command)]
pub async fn temperature(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  // Defer the reply; the iLO query may take longer than Discord's ack window.
  ctx.defer().await?;

  let ilo = ilo_data(RedfishEndpoint::Thermal).await.unwrap();
  let data = ilo.downcast_ref::<Chassis>().unwrap();

  let mut tempdata = String::new();
  let mut fandata = String::new();

  // Only these sensors are surfaced in the embed; everything else is skipped.
  let allowed_sensors = [
    "01-Inlet Ambient",
    "04-P1 DIMM 1-6",
    "14-Chipset Zone"
  ];

  for temp in &data.temperatures {
    // Skip zero readings and sensors not on the allow-list.
    if temp.reading_celsius == 0 || !allowed_sensors.contains(&temp.name.as_str()) {
      continue;
    }

    // Map firmware sensor IDs to friendly display names.
    let name = match temp.name.as_str() {
      "01-Inlet Ambient" => "Inlet Ambient",
      "04-P1 DIMM 1-6" => "P1 DIMM 1-6",
      "14-Chipset Zone" => "Chipset Zone",
      _ => "Unknown Sensor"
    };

    tempdata.push_str(&format!("**{}:** `{}°C`\n", name, temp.reading_celsius));
  }

  for fan in &data.fans {
    // Skip fans reporting zero (idle/absent bays).
    if fan.current_reading == 0 {
      continue;
    }
    fandata.push_str(&format!("**{}:** `{}%`\n", fan.fan_name, fan.current_reading));
  }

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - Temperatures", ILO_HOSTNAME))
      .fields(vec![
        ("Temperatures", tempdata, false),
        ("Fans", fandata, false)
      ])
  )).await?;

  Ok(())
}
/// Retrieve the server's power data
#[poise::command(slash_command)]
pub async fn power(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  // Defer the reply; the iLO query may take longer than Discord's ack window.
  ctx.defer().await?;

  let ilo = ilo_data(RedfishEndpoint::Power).await.unwrap();
  let data = ilo.downcast_ref::<Power>().unwrap();

  // Assemble the whole description in one pass rather than repeated pushes.
  let powerdata = format!(
    "**Power Capacity:** `{}w`\n**Power Consumed:** `{}w`\n**Average Power:** `{}w`\n**Max Consumed:** `{}w`\n**Min Consumed:** `{}w`",
    data.power_capacity_watts,
    data.power_consumed_watts,
    data.power_metrics.average_consumed_watts,
    data.power_metrics.max_consumed_watts,
    data.power_metrics.min_consumed_watts
  );

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - Power", ILO_HOSTNAME))
      .description(powerdata)
  )).await?;

  Ok(())
}
/// Retrieve the server's system data
#[poise::command(slash_command)]
pub async fn system(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  ctx.defer().await?;

  // Query both endpoints concurrently.
  let (ilo_sys, ilo_event) = tokio::join!(
    ilo_data(RedfishEndpoint::System),
    ilo_data(RedfishEndpoint::EventService)
  );
  let ilo_sys = ilo_sys.unwrap();
  let ilo_event = ilo_event.unwrap();

  let system_data = ilo_sys.downcast_ref::<System>().unwrap();
  let event_data = ilo_event.downcast_ref::<Event>().unwrap();

  let mut data = String::new();

  // Translate Redfish POST states into friendlier labels.
  let post_state = match system_data.oem.hp.post_state.as_str() {
    "FinishedPost" => "Finished POST",
    "InPost" => "In POST (Booting)",
    "PowerOff" => "Powered off",
    _ => "Unknown State"
  };
  // Log the raw state whenever the machine isn't fully booted.
  if system_data.oem.hp.post_state != "FinishedPost" {
    println!("iLO:PostState = {}", system_data.oem.hp.post_state);
  }

  data.push_str(&format!("**Health:** `{}`\n", event_data.status.health.as_ref().unwrap_or(&"Unknown".to_string())));
  data.push_str(&format!("**POST:** `{}`\n", post_state));
  data.push_str(&format!("**Power:** `{}`\n", &system_data.power_state));
  data.push_str(&format!("**Model:** `{}`", &system_data.model));

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - System", ILO_HOSTNAME))
      .description(data)
      .fields(vec![
        (format!("CPU ({}x)", system_data.processor_summary.count), system_data.processor_summary.cpu.trim().to_string(), true),
        ("RAM".to_string(), format!("{} GB", system_data.memory.total_system_memory), true)
      ])
  )).await?;

  Ok(())
}

101
src/commands/midi.rs Normal file
View File

@ -0,0 +1,101 @@
use crate::{
Error,
internals::utils::{
mention_dev,
format_bytes
}
};
use regex::Regex;
use std::{
os::unix::fs::MetadataExt,
fs::{
write,
remove_file,
metadata
}
};
use poise::{
CreateReply,
serenity_prelude::CreateAttachment
};
/// Convert MIDI file to WAV
#[poise::command(context_menu_command = "MIDI -> WAV")]
pub async fn midi_to_wav(
ctx: poise::Context<'_, (), Error>,
#[description = "MIDI file to be converted"] message: poise::serenity_prelude::Message
) -> Result<(), Error> {
let re = Regex::new(r"(?i)\.mid$").unwrap();
if !message.embeds.is_empty() || message.attachments.is_empty() || !re.is_match(&message.attachments[0].filename) {
ctx.reply("That ain't a MIDI file! What are you even doing??").await?;
return Ok(());
}
ctx.defer().await?;
let bytes = match message.attachments[0].download().await {
Ok(bytes) => bytes,
Err(y) => {
ctx.send(CreateReply::default()
.content(format!(
"Download failed, ask {} to check console for more information!",
mention_dev(ctx).unwrap_or_default()
))
)
.await.unwrap();
return Err(Error::from(format!("Failed to download the file: {}", y)))
}
};
let midi_path = &message.attachments[0].filename;
write(midi_path, bytes)?;
let wav_path = re.replace(&midi_path, ".wav");
let sf2_path = "/tmp/FluidR3_GM.sf2";
write(sf2_path, include_bytes!("../internals/assets/FluidR3_GM.sf2"))?;
let output = std::process::Command::new("fluidsynth")
.args(&["-ni", sf2_path, midi_path, "-F", &wav_path])
.output();
// Just to add an info to console to tell what the bot is doing when MIDI file is downloaded.
println!("Discord[{}:{}]: Processing MIDI file: \"{}\"", ctx.guild().unwrap().name, ctx.command().qualified_name, midi_path);
match output {
Ok(_) => {
let reply = ctx.send(CreateReply::default()
.attachment(CreateAttachment::path(&*wav_path).await.unwrap())
).await;
if reply.is_err() {
println!(
"Discord[{}:{}]: Processed file couldn't be uploaded back to Discord channel due to upload limit",
ctx.guild().unwrap().name, ctx.command().qualified_name
);
ctx.send(CreateReply::default()
.content(format!(
"Couldn't upload the processed file (`{}`, `{}`) due to upload limit",
&*wav_path, format_bytes(metadata(&*wav_path).unwrap().size())
))
).await.unwrap();
} else if reply.is_ok() {
remove_file(midi_path)?;
remove_file(&*wav_path)?;
}
},
Err(y) => {
ctx.send(CreateReply::default()
.content("Command didn't execute successfully, check console for more information!")
).await.unwrap();
return Err(Error::from(format!("Midi conversion failed: {}", y)))
}
}
Ok(())
}

View File

@ -1,4 +0,0 @@
pub mod ping;
pub mod status;
pub mod uptime;
pub mod gameserver;

View File

@ -1,41 +1,23 @@
use crate::{ use crate::{
Error, Error,
models::gameservers::Gameservers, internals::{
commands::gameserver::ac_server_name, config::BINARY_PROPERTIES,
internals::utils::EMBED_COLOR, http::HttpClient,
internals::http::HttpClient, utils::token_path
internals::utils::token_path }
}; };
use serde_json::Value;
use std::collections::HashMap; use std::collections::HashMap;
use tokio::join; use tokio::join;
use poise::CreateReply; use poise::{
use poise::serenity_prelude::builder::CreateEmbed; CreateReply,
use serde::Deserialize; serenity_prelude::builder::CreateEmbed
use serde_json::Value; };
#[derive(Deserialize)]
struct MinecraftQueryData {
motd: Option<MinecraftMotd>,
players: Option<MinecraftPlayers>,
version: Option<String>,
online: bool
}
#[derive(Deserialize)]
struct MinecraftMotd {
clean: Vec<String>
}
#[derive(Deserialize, Clone, Copy)]
struct MinecraftPlayers {
online: i32,
max: i32
}
async fn pms_serverstatus(url: &str) -> Result<Vec<(String, Vec<Value>)>, Error> { async fn pms_serverstatus(url: &str) -> Result<Vec<(String, Vec<Value>)>, Error> {
let client = HttpClient::new(); let client = HttpClient::new();
let req = client.get(url).await?; let req = client.get(url, "PMS-Status").await?;
let response = req.json::<HashMap<String, Value>>().await?; let response = req.json::<HashMap<String, Value>>().await?;
let data = response["data"].as_array().unwrap(); let data = response["data"].as_array().unwrap();
@ -84,22 +66,11 @@ fn process_pms_statuses(servers: Vec<(String, Vec<Value>)>) -> Vec<(String, Stri
statuses statuses
} }
async fn gs_query_minecraft(server_ip: &str) -> Result<MinecraftQueryData, Error> {
let client = HttpClient::new();
let req = client.get(&format!("https://api.mcsrvstat.us/2/{}", server_ip)).await?;
if req.status().is_success() {
let data: MinecraftQueryData = req.json().await?;
Ok(data)
} else if req.status().is_server_error() {
Err(Error::from("Webserver returned a 5xx error."))
} else {
Err(Error::from("Failed to query the server."))
}
}
/// Query the server statuses /// Query the server statuses
#[poise::command(slash_command, subcommands("wg", "gs"), subcommand_required)] #[poise::command(
slash_command,
subcommands("wg")
)]
pub async fn status(_: poise::Context<'_, (), Error>) -> Result<(), Error> { pub async fn status(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
Ok(()) Ok(())
} }
@ -109,7 +80,7 @@ pub async fn status(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
pub async fn wg(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> { pub async fn wg(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
let pms_asia = token_path().await.wg_pms; let pms_asia = token_path().await.wg_pms;
let pms_eu = pms_asia.replace("asia", "eu"); let pms_eu = pms_asia.replace("asia", "eu");
let embed = CreateEmbed::new().color(EMBED_COLOR); let embed = CreateEmbed::new().color(BINARY_PROPERTIES.embed_color);
let (servers_asia, servers_eu) = join!(pms_serverstatus(&pms_asia), pms_serverstatus(&pms_eu)); let (servers_asia, servers_eu) = join!(pms_serverstatus(&pms_asia), pms_serverstatus(&pms_eu));
let joined_pms_servers = [servers_eu.unwrap(), servers_asia.unwrap()].concat(); let joined_pms_servers = [servers_eu.unwrap(), servers_asia.unwrap()].concat();
@ -119,46 +90,3 @@ pub async fn wg(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
Ok(()) Ok(())
} }
/// Retrieve the given server data from gameservers DB
#[poise::command(slash_command, guild_only)]
pub async fn gs(
ctx: poise::Context<'_, (), Error>,
#[description = "Server name"] #[autocomplete = "ac_server_name"] server_name: String
) -> Result<(), Error> {
let server_data = Gameservers::get_server_data(ctx.guild_id().unwrap().into(), &server_name).await?;
// Extract values from a Vec above
let game_name = &server_data[1];
let ip_address = &server_data[2];
match game_name.as_str() {
"Minecraft" => {
let result = gs_query_minecraft(ip_address).await?;
let embed = CreateEmbed::new().color(EMBED_COLOR);
if result.online {
let mut embed_fields = Vec::new();
embed_fields.push(("Server IP".to_owned(), ip_address.to_owned(), true));
embed_fields.push((format!("\u{200b}"), format!("\u{200b}"), true));
embed_fields.push(("MOTD".to_owned(), format!("{}", result.motd.unwrap().clean[0]), true));
embed_fields.push(("Players".to_owned(), format!("**{}**/**{}**", result.players.unwrap().online, result.players.clone().unwrap().max), true));
embed_fields.push(("Version".to_owned(), result.version.unwrap(), true));
ctx.send(CreateReply::default()
.embed(embed
.title(server_name)
.fields(embed_fields)
)
).await?;
} else {
ctx.send(CreateReply::default()
.content(format!("**{}** (`{}`) is currently offline or unreachable.", server_name, ip_address))
).await?;
}
},
_ => {}
}
Ok(())
}

View File

@ -1,20 +1,52 @@
use crate::{ use crate::{
Error, Error,
GIT_COMMIT_HASH,
GIT_COMMIT_BRANCH,
internals::utils::{ internals::utils::{
format_duration, BOT_VERSION,
concat_message, format_duration
BOT_VERSION
} }
}; };
use sysinfo::System; use sysinfo::System;
use uptime_lib::get; use uptime_lib::get;
use std::time::{ use std::{
fs::File,
path::Path,
time::{
Duration, Duration,
SystemTime, SystemTime,
UNIX_EPOCH UNIX_EPOCH
},
io::{
BufRead,
BufReader
}
}; };
fn get_os_info() -> String {
let path = Path::new("/etc/os-release");
let mut name = "BoringOS".to_string();
let mut version = "v0.0".to_string();
if let Ok(file) = File::open(&path) {
let reader = BufReader::new(file);
for line in reader.lines() {
if let Ok(line) = line {
if line.starts_with("NAME=") {
name = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
} else if line.starts_with("VERSION=") {
version = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
} else if line.starts_with("VERSION_ID=") {
version = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
}
}
}
}
format!("{} {}", name, version)
}
/// Retrieve host and bot uptimes /// Retrieve host and bot uptimes
#[poise::command(slash_command)] #[poise::command(slash_command)]
pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> { pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
@ -25,6 +57,9 @@ pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
// Fetch system's uptime // Fetch system's uptime
let sys_uptime = get().unwrap().as_secs(); let sys_uptime = get().unwrap().as_secs();
// Fetch system's processor
let cpu = sys.cpus();
// Fetch bot's process uptime // Fetch bot's process uptime
let curr_pid = sysinfo::get_current_pid().unwrap(); let curr_pid = sysinfo::get_current_pid().unwrap();
let now = SystemTime::now(); let now = SystemTime::now();
@ -35,11 +70,13 @@ pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
} }
let stat_msg = vec![ let stat_msg = vec![
format!("**{} {}**", _bot.name, BOT_VERSION.as_str()), format!("**{} {}** `{}:{}`", _bot.name, BOT_VERSION.as_str(), GIT_COMMIT_HASH, GIT_COMMIT_BRANCH),
format!(">>> System: `{}`", format_duration(sys_uptime)), format!(">>> System: `{}`", format_duration(sys_uptime)),
format!("Process: `{}`", format_duration(proc_uptime)) format!("Process: `{}`", format_duration(proc_uptime)),
format!("CPU: `{}`", format!("{}", cpu[0].brand())),
format!("OS: `{}`", get_os_info())
]; ];
ctx.reply(concat_message(stat_msg)).await?; ctx.reply(stat_msg.join("\n")).await?;
Ok(()) Ok(())
} }

1
src/controllers.rs Normal file
View File

@ -0,0 +1 @@
pub mod cache;

91
src/controllers/cache.rs Normal file
View File

@ -0,0 +1,91 @@
use crate::internals::utils::token_path;
use bb8_redis::{
bb8::Pool,
redis::cmd,
redis::RedisError,
redis::RedisResult,
redis::AsyncCommands,
RedisConnectionManager
};
use tokio::time::{
Duration,
sleep
};
/// Thin wrapper around a bb8-managed Redis connection pool.
#[derive(Debug)]
pub struct RedisController {
  pool: Pool<RedisConnectionManager>
}
impl RedisController {
  /// Connect to Redis using the URI from the token config.
  ///
  /// Blocks (asynchronously) until a working pool is established; only the
  /// initial manager construction can return an error.
  pub async fn new() -> Result<Self, RedisError> {
    let manager = RedisConnectionManager::new(token_path().await.redis_uri.as_str())?;
    let pool = Self::create_pool(manager).await;

    Ok(Self { pool })
  }

  /// Build the connection pool, retrying forever with exponential backoff
  /// (1s doubling up to 64s) until a pooled connection answers PING.
  async fn create_pool(manager: RedisConnectionManager) -> Pool<RedisConnectionManager> {
    let mut backoff = 1;

    loop {
      match Pool::builder().max_size(20).retry_connection(true).build(manager.clone()).await {
        Ok(pool) => {
          // Verify the pool actually yields a live connection before handing it out.
          match pool.get().await {
            Ok(mut conn) => {
              let ping: RedisResult<String> = cmd("PING").query_async(&mut *conn).await;
              match ping {
                Ok(_) => {
                  println!("Redis[Info]: Successfully connected");
                  return pool.clone();
                },
                Err(e) => {
                  eprintln!("Redis[Error]: {}, retrying in {} seconds", e, backoff);
                  Self::apply_backoff(&mut backoff).await;
                }
              }
            },
            Err(e) => {
              eprintln!("Redis[ConnError]: {}, retrying in {} seconds", e, backoff);
              Self::apply_backoff(&mut backoff).await;
            }
          }
        }
        Err(e) => {
          eprintln!("Redis[PoolError]: {}, retrying in {} seconds", e, backoff);
          Self::apply_backoff(&mut backoff).await;
        }
      }
    }
  }

  /// Sleep for the current backoff delay, then double it (capped at 64s).
  async fn apply_backoff(backoff: &mut u64) {
    sleep(Duration::from_secs(*backoff)).await;
    if *backoff < 64 {
      *backoff *= 2;
    }
  }

  /// Get a key from the cache
  // NOTE(review): the accessors below unwrap `pool.get()`, so a pool
  // checkout failure panics instead of surfacing as a RedisError — confirm
  // this is acceptable for callers.
  pub async fn get(&self, key: &str) -> RedisResult<Option<String>> {
    let mut conn = self.pool.get().await.unwrap();
    conn.get(key).await
  }

  /// Delete a key from the cache
  pub async fn del(&self, key: &str) -> RedisResult<()> {
    let mut conn = self.pool.get().await.unwrap();
    conn.del(key).await
  }

  /// Set a key with a value in the cache
  pub async fn set(&self, key: &str, value: &str) -> RedisResult<()> {
    let mut conn = self.pool.get().await.unwrap();
    conn.set(key, value).await
  }

  /// Set a key with an expiration time in seconds
  pub async fn expire(&self, key: &str, seconds: i64) -> RedisResult<()> {
    let mut conn = self.pool.get().await.unwrap();
    conn.expire(key, seconds).await
  }
}

View File

@ -1,37 +0,0 @@
use crate::internals;
use poise::serenity_prelude::prelude::TypeMapKey;
use tokio_postgres::{Client, NoTls, Error};
pub struct DatabaseController {
pub client: Client
}
impl TypeMapKey for DatabaseController {
type Value = DatabaseController;
}
impl DatabaseController {
pub async fn new() -> Result<DatabaseController, Error> {
let (client, connection) = tokio_postgres::connect(&internals::utils::token_path().await.postgres_uri, NoTls).await?;
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("Connection error: {}", e);
}
});
// Gameservers
client.batch_execute("
CREATE TABLE IF NOT EXISTS gameservers (
server_name VARCHAR(255) NOT NULL,
game_name VARCHAR(255) NOT NULL,
guild_owner BIGINT NOT NULL,
ip_address VARCHAR(255) NOT NULL,
PRIMARY KEY (server_name, guild_owner)
);
").await?;
Ok(DatabaseController { client })
}
}

View File

@ -1 +0,0 @@
pub mod database;

View File

@ -1,3 +1,5 @@
pub mod utils; pub mod config;
pub mod http; pub mod http;
pub mod tasks;
pub mod tsclient; pub mod tsclient;
pub mod utils;

BIN
src/internals/assets/FluidR3_GM.sf2 (Stored with Git LFS) Executable file

Binary file not shown.

53
src/internals/config.rs Normal file
View File

@ -0,0 +1,53 @@
use std::sync::LazyLock;
/// Compile-time bot configuration; values differ between the `production`
/// feature build and the development build (see `BINARY_PROPERTIES`).
pub struct ConfigMeta {
  // Accent color used for embeds
  pub embed_color: i32,
  // Channel ID notified when the bot becomes ready
  pub ready_notify: u64,
  // Channel ID that receives RSS task output
  pub rss_channel: u64,
  // Channel ID for bot log output
  pub kon_logs: u64,
  // Discord user IDs treated as bot developers
  pub developers: Vec<u64>
}
// Production build: plain defaults from `ConfigMeta::new`.
// (Passing the function directly avoids the redundant `|| ...` closure.)
#[cfg(feature = "production")]
pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(ConfigMeta::new);

// Development build: overrides the embed color and routes notifications to
// the test channel so dev output stays out of production channels.
#[cfg(not(feature = "production"))]
pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(||
  ConfigMeta::new()
    .embed_color(0xf1d63c)
    .ready_notify(865673694184996888)
    .rss_channel(865673694184996888)
);
impl ConfigMeta {
  /// Production defaults; the dev build layers overrides on top via the
  /// builder methods below.
  fn new() -> Self {
    Self {
      embed_color: 0x5a99c7,
      ready_notify: 865673694184996888,
      rss_channel: 865673694184996888,
      kon_logs: 1268493237912604672,
      developers: vec![
        190407856527376384 // toast.ts
      ]
    }
  }

  // Scalable functions below;
  // Builder-style overrides, compiled only for non-production builds.
  #[cfg(not(feature = "production"))]
  fn embed_color(mut self, color: i32) -> Self {
    self.embed_color = color;
    self
  }

  #[cfg(not(feature = "production"))]
  fn ready_notify(mut self, channel_id: u64) -> Self {
    self.ready_notify = channel_id;
    self
  }

  #[cfg(not(feature = "production"))]
  fn rss_channel(mut self, channel_id: u64) -> Self {
    self.rss_channel = channel_id;
    self
  }
}

View File

@ -1,30 +1,39 @@
use std::sync::Arc; use std::time::Duration;
use once_cell::sync::Lazy;
use reqwest::{ use reqwest::{
Client, Client,
header::USER_AGENT Response,
Error
}; };
static CUSTOM_USER_AGENT: Lazy<String> = Lazy::new(|| const ERROR_PREFIX: &str = "HTTPClient[Error]:";
format!("Kon/{}/Rust", super::utils::BOT_VERSION.as_str())
);
pub struct HttpClient { pub struct HttpClient(Client);
client: Arc<Client>
}
impl HttpClient { impl HttpClient {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self(Client::new())
client: Arc::new(Client::new())
}
} }
pub async fn get(&self, url: &str) -> Result<reqwest::Response, reqwest::Error> { pub async fn get(&self, url: &str, ua: &str) -> Result<Response, Error> {
let req = self.client.get(url) let response = self.0.get(url).header(
.header(USER_AGENT, CUSTOM_USER_AGENT.as_str()) reqwest::header::USER_AGENT,
format!("Kon ({}-{}) - {}/reqwest", super::utils::BOT_VERSION.as_str(), crate::GIT_COMMIT_HASH, ua)
)
.timeout(Duration::from_secs(30))
.send() .send()
.await?; .await;
Ok(req)
match response {
Ok(res) => Ok(res),
Err(y) if y.is_timeout() => {
eprintln!("{ERROR_PREFIX} Request timed out for \"{}\"", url);
Err(y)
},
Err(y) if y.is_connect() => {
eprintln!("{ERROR_PREFIX} Connection failed for \"{}\"", url);
Err(y)
},
Err(y) => Err(y)
}
} }
} }

47
src/internals/tasks.rs Normal file
View File

@ -0,0 +1,47 @@
mod rss;
pub use rss::rss;
use tokio::task::spawn;
use poise::serenity_prelude::Context;
use std::{
sync::{
Arc,
atomic::{
AtomicBool,
Ordering
}
},
future::Future
};
/// Log an informational task-scheduler message to stdout.
fn task_info(name: &str, message: &str) {
  // `println!` formats directly; the previous `println!("{}", format!(...))`
  // allocated an intermediate String for nothing.
  println!("TaskScheduler[{}]: {}", name, message)
}
/// Log a task-scheduler error message to stderr.
fn task_err(name: &str, message: &str) {
  // `eprintln!` formats directly; the previous `eprintln!("{}", format!(...))`
  // allocated an intermediate String for nothing.
  eprintln!("TaskScheduler[{}:Error]: {}", name, message)
}
// Global "a task is currently running" flag shared by all run_task callers.
static TASK_RUNNING: AtomicBool = AtomicBool::new(false);

/// Spawn `task` on the Tokio runtime, allowing at most one instance at a time.
///
/// The flag is claimed with an atomic compare-exchange so two concurrent
/// callers cannot both observe "not running" and spawn duplicates (the
/// previous separate load-then-store had exactly that race). Task errors are
/// logged to stderr, not propagated.
pub async fn run_task<F, T>(ctx: Arc<Context>, task: F)
where
  F: Fn(Arc<Context>) -> T + Send + 'static,
  T: Future<Output = Result<(), crate::Error>> + Send + 'static
{
  let ctx_cl = Arc::clone(&ctx);

  // Atomically claim the flag; bail out silently if a task already holds it.
  if TASK_RUNNING.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst).is_ok() {
    spawn(async move {
      if let Err(y) = task(ctx_cl).await {
        eprintln!("TaskScheduler[Main:Error]: Failed to execute the task, error reason: {}", y);
        if let Some(source) = y.source() {
          eprintln!("TaskScheduler[Main:Error]: Failed to execute the task, this is caused by: {:#?}", source);
        }
      }
      // Release the flag once the task finishes, success or failure.
      TASK_RUNNING.store(false, Ordering::SeqCst);
    });
  }
}

161
src/internals/tasks/rss.rs Normal file
View File

@ -0,0 +1,161 @@
mod processor; // Process the feeds and send it off to Discord
mod esxi;
mod github;
mod gportal;
mod rust;
use crate::{
Error,
controllers::cache::RedisController
};
use super::{
super::{
http::HttpClient,
config::BINARY_PROPERTIES
},
task_info,
task_err
};
use once_cell::sync::OnceCell;
use feed_rs::parser::parse;
use reqwest::Response;
use regex::Regex;
use std::sync::Arc;
use poise::serenity_prelude::{
Context,
CreateEmbed,
Timestamp
};
use tokio::time::{
Duration,
interval
};
const TASK_NAME: &str = "RSS";
static REDIS_EXPIRY_SECS: i64 = 7200;
static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();
async fn redis_() {
let redis = RedisController::new().await.unwrap();
REDIS_SERVICE.set(Arc::new(redis)).unwrap();
}
/// Get the shared Redis controller, lazily initializing it on first use.
///
/// NOTE(review): the check-then-init below is not atomic — two concurrent
/// first callers could both enter `redis_()`; confirm this is only reached
/// from the single RSS task.
async fn get_redis() -> Arc<RedisController> {
  if REDIS_SERVICE.get().is_none() {
    redis_().await;
  }
  REDIS_SERVICE.get().unwrap().clone()
}
/// Rewrite HTML anchors (`<a href="url">text</a>`) as Discord markdown links
/// (`[text](url)`).
fn format_href_to_discord(input: &str) -> String {
  let anchor_re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
  anchor_re.replace_all(input, r"[$2]($1)").into_owned()
}
/// Convert a small subset of HTML markup into Discord-flavored markdown.
fn format_html_to_discord(input: String) -> String {
  // (pattern, replacement) pairs applied in order; the catch-all tag
  // stripper runs last among them, then anchors are rewritten.
  let rules: [(&str, &str); 5] = [
    (r#"</?\s*p\s*>"#, "\n"),         // <p> / </p> -> newline
    (r#"<\s*br\s*/?\s*>"#, "\n"),     // <br> / <br /> -> newline
    (r#"</?\s*strong\s*>"#, "**"),    // <strong> -> bold markers
    (r#"</?\s*(var|small)\s*>"#, ""), // drop <var> / <small>
    (r#"<[^>]+>"#, "")                // strip any remaining tags
  ];

  let mut output = input;
  for (pattern, replacement) in rules {
    output = Regex::new(pattern).unwrap().replace_all(&output, replacement).to_string();
  }

  // Replace all instances of <a href="url">text</a> with [text](url)
  format_href_to_discord(&output)
}
/// Fetch an RSS/Atom feed over HTTP with the RSS monitor user-agent.
///
/// # Errors
/// Returns any error from the underlying HTTP client, converted into the
/// crate-wide `Error` type via `?` (the previous `match` only re-wrapped
/// the error by hand).
async fn fetch_feed(url: &str) -> Result<Response, Error> {
  let http = HttpClient::new();
  Ok(http.get(url, "RSS-Monitor").await?)
}
/// Store `value` under `key` in Redis and apply the standard expiry window.
///
/// Redis failures are logged rather than panicking: a transient cache outage
/// must not crash the long-running RSS task (the previous `set(...).unwrap()`
/// did exactly that, while the expire failure was already only logged).
async fn save_to_redis(key: &str, value: &str) -> Result<(), Error> {
  let redis = get_redis().await;

  if let Err(y) = redis.set(key, value).await {
    task_err("RSS", format!("[RedisSet]: {}", y).as_str());
    // Without the key stored there is nothing to expire.
    return Ok(());
  }

  if let Err(y) = redis.expire(key, REDIS_EXPIRY_SECS).await {
    task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
  }

  Ok(())
}
/// Build the standard RSS notification embed with the given contents.
fn embed(
  color: u32,
  title: String,
  url: String,
  description: String,
  timestamp: Timestamp
) -> CreateEmbed {
  let base = CreateEmbed::new()
    .title(title)
    .url(url)
    .description(description);

  base
    .color(color)
    .timestamp(timestamp)
}
/// Maximum number of bytes of feed content forwarded into a Discord embed.
const MAX_CONTENT_LENGTH: usize = 4000;

/// Truncate `s` to at most `MAX_CONTENT_LENGTH` bytes.
///
/// The cut point backs up to the nearest `char` boundary, because slicing a
/// `&str` at an arbitrary byte index panics when it lands inside a multi-byte
/// UTF-8 sequence — the previous direct `s[..MAX_CONTENT_LENGTH]` would
/// crash the RSS task on such content.
fn trim_old_content(s: &str) -> String {
  if s.len() <= MAX_CONTENT_LENGTH {
    return s.to_string();
  }

  // Walk back until `end` sits on a valid char boundary (at most 3 steps).
  let mut end = MAX_CONTENT_LENGTH;
  while !s.is_char_boundary(end) {
    end -= 1;
  }
  s[..end].to_string()
}
/// Embed colors keyed to the incident state reported by a status feed.
enum IncidentColorMap {
  Update,
  Investigating,
  Monitoring,
  Resolved,
  Default
}

impl IncidentColorMap {
  /// Hex color associated with this incident state.
  fn color(&self) -> u32 {
    // Named palette entries, for readability.
    const MADANG: u32 = 0xABDD9E;
    const FRENCH_PASS: u32 = 0xA5CCE0;
    const MONTE_CARLO: u32 = 0x81CBAD;
    const EMERALD: u32 = 0x57F287;

    match self {
      Self::Update => MADANG,
      Self::Investigating => FRENCH_PASS,
      Self::Monitoring | Self::Default => MONTE_CARLO,
      Self::Resolved => EMERALD
    }
  }
}
/// Long-running RSS watcher task.
///
/// Ticks on a fixed interval (5 minutes in production, 30 seconds in
/// development) and runs the feed processor on every tick. Never returns
/// under normal operation.
pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
  #[cfg(feature = "production")]
  let mut interval = interval(Duration::from_secs(300)); // Check feeds every 5 mins
  #[cfg(not(feature = "production"))]
  let mut interval = interval(Duration::from_secs(30)); // Check feeds every 30 secs
  let mut first_run = true;

  // TASK_NAME is already a `&str`; the old `&TASK_NAME` double-borrow was noise.
  task_info(TASK_NAME, "Task loaded!");

  loop {
    interval.tick().await;
    if first_run {
      task_info(&format!("{TASK_NAME}:Processor"), "Starting up!");
      first_run = false;
    }
    processor::feed_processor(&ctx).await;
  }
}

View File

@ -0,0 +1,79 @@
use crate::Error;
use super::{
super::task_err,
REDIS_EXPIRY_SECS,
get_redis,
save_to_redis,
fetch_feed,
parse,
format_href_to_discord
};
use std::io::Cursor;
use regex::Regex;
use poise::serenity_prelude::{
CreateEmbed,
CreateEmbedAuthor,
Timestamp
};
/// Checks the VMware ESXi 7.0 patch feed and builds an announcement embed
/// when a new Update release appears.
///
/// Returns `Ok(None)` when there is nothing to announce: first run (cache is
/// seeded), unchanged patch, or a category term that doesn't match the
/// expected pattern.
pub async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_ESXi";
  let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  // NOTE(review): `parse` plus the [0]/[3] indexing below assume a
  // well-formed, non-empty feed; a malformed response panics — TODO confirm
  // the feed shape is stable enough for this.
  let feed = parse(cursor).unwrap();
  let home_page = feed.links[0].clone().href;
  let article = feed.entries[0].clone();

  // Extracts "Update <n><letter?>" (e.g. "Update 3c") from a category term.
  fn get_patch_version(input: &str) -> Option<String> {
    let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
    if let Some(caps) = re.captures(input) {
      let update_num = caps[1].to_string();
      let letter = caps.get(2).map_or("", |m| m.as_str());
      Some(format!("Update {}{}", update_num, letter))
    } else {
      None
    }
  }

  // First run: seed the cache with the raw category term and report nothing.
  let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();
  if cached_patch.is_empty() {
    redis.set(&rkey, &article.categories[3].term).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(patch) = get_patch_version(&article.categories[3].term) {
    // NOTE(review): the cache is seeded/saved with the full category term but
    // compared against the extracted "Update N" string — these only ever
    // match if the term is exactly that string; verify this is intended.
    if patch == cached_patch {
      return Ok(None);
    } else {
      save_to_redis(&rkey, &article.categories[3].term).await?;

      Ok(Some(CreateEmbed::new()
        .color(0x4EFBCB)
        .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
        .thumbnail(feed.logo.unwrap().uri)
        .description(format!(
          "{} {} for {} {} has been rolled out!\n{}",
          article.categories[2].term,
          article.categories[3].term,
          article.categories[0].term,
          article.categories[1].term,
          format_href_to_discord(article.summary.unwrap().content.as_str())
        ))
        .timestamp(Timestamp::from(article.updated.unwrap())))
      )
    }
  } else {
    task_err("RSS:ESXi", &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term.as_str()));
    Ok(None)
  }
}

View File

@ -0,0 +1,107 @@
use crate::Error;
use super::{
super::task_err,
REDIS_EXPIRY_SECS,
IncidentColorMap,
get_redis,
save_to_redis,
fetch_feed,
parse,
embed,
trim_old_content,
format_html_to_discord
};
use std::io::Cursor;
use regex::Regex;
use poise::serenity_prelude::{
CreateEmbed,
Timestamp
};
/// Checks the GitHub status Atom feed and builds an incident embed when a
/// new incident appears or an existing one receives an update.
///
/// Returns `Ok(None)` when there is nothing new: first run (cache is
/// seeded), same incident with unchanged content, or an incident link that
/// doesn't match the expected pattern.
pub async fn github_embed() -> Result<Option<CreateEmbed>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_GitHub";
  let rkey_content = format!("{}_Content", rkey);
  let url = "https://www.githubstatus.com/history.atom";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  // NOTE(review): `parse` and the [0] indexing assume a well-formed,
  // non-empty feed; a malformed response panics here.
  let feed = parse(cursor).unwrap();
  let incident_page = feed.entries[0].links[0].clone().href;
  let article = feed.entries[0].clone();

  // Pulls the trailing incident ID out of a ".../incidents/<id>" URL.
  fn get_incident_id(input: &str) -> Option<String> {
    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
    re.captures(input).map(|caps| caps[1].to_string())
  }

  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());

  // Classify the most recent dated status entry to pick the embed color.
  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
  let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
  let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();

  // Entries are separated by "Mon DD, HH:MM UTC" stamps; take the first
  // non-empty chunk (the newest update).
  let first_entry = date_patt.split(&new_content).map(str::trim).find(|e| !e.is_empty()).unwrap_or(&new_content);
  let color = if update_patt.is_match(first_entry) {
    IncidentColorMap::Update.color()
  } else if investigating_patt.is_match(first_entry) {
    IncidentColorMap::Investigating.color()
  } else if monitoring_patt.is_match(first_entry) {
    // Added for parity with the G-Portal checker; same color as Default,
    // so this does not change the emitted embed.
    IncidentColorMap::Monitoring.color()
  } else if resolved_patt.is_match(first_entry) {
    IncidentColorMap::Resolved.color()
  } else {
    IncidentColorMap::Default.color()
  };

  // First run: seed both cache keys and report nothing.
  if cached_incident.is_empty() {
    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
    redis.set(&rkey_content, &new_content).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(incident) = get_incident_id(&article.links[0].href) {
    if incident == cached_incident {
      // Same incident: only re-emit when its content actually changed.
      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
      if cached_content == new_content {
        Ok(None)
      } else {
        redis.set(&rkey_content, &new_content).await.unwrap();
        redis.expire(&rkey_content, 21600).await.unwrap();
        Ok(Some(embed(
          color,
          article.title.unwrap().content,
          incident_page,
          trim_old_content(&new_content),
          Timestamp::from(article.updated.unwrap())
        )))
      }
    } else {
      // New incident: remember it and emit the first embed for it.
      save_to_redis(&rkey, &incident).await?;
      redis.set(&rkey_content, &new_content).await.unwrap();
      Ok(Some(embed(
        color,
        article.title.unwrap().content,
        incident_page,
        trim_old_content(&new_content),
        Timestamp::from(article.updated.unwrap())
      )))
    }
  } else {
    task_err("RSS:GitHub", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
    Ok(None)
  }
}

View File

@ -0,0 +1,110 @@
use crate::Error;
use super::{
super::task_err,
REDIS_EXPIRY_SECS,
IncidentColorMap,
get_redis,
save_to_redis,
fetch_feed,
parse,
embed,
trim_old_content,
format_html_to_discord
};
use std::io::Cursor;
use regex::Regex;
use poise::serenity_prelude::{
CreateEmbed,
Timestamp
};
/// Checks the G-Portal status Atom feed and builds an incident embed when a
/// new incident appears or an existing one receives an update.
///
/// Returns `Ok(None)` when there is nothing new: first run (cache is
/// seeded), same incident with unchanged content, or an incident link that
/// doesn't match the expected pattern.
pub async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_GPortal";
  let rkey_content = format!("{}_Content", rkey);
  let url = "https://status.g-portal.com/history.atom";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  // NOTE(review): `parse` and the [0] indexing assume a well-formed,
  // non-empty feed; a malformed response panics here.
  let feed = parse(cursor).unwrap();
  let incident_page = feed.links[0].clone().href;
  let article = feed.entries[0].clone();

  // Pulls the trailing incident ID out of a ".../incidents/<id>" URL.
  fn get_incident_id(input: &str) -> Option<String> {
    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
    if let Some(caps) = re.captures(input) {
      Some(caps[1].to_string())
    } else {
      None
    }
  }

  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());

  // Classify the most recent dated status entry to pick the embed color.
  let color: u32;
  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
  let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
  let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();

  // Entries are separated by "Mon DD, HH:MM UTC" stamps; take the first
  // non-empty chunk (the newest update).
  let first_entry = date_patt.split(&new_content).map(str::trim).find(|e| !e.is_empty()).unwrap_or(&new_content);
  color = if update_patt.is_match(&first_entry) {
    IncidentColorMap::Update.color()
  } else if investigating_patt.is_match(&first_entry) {
    IncidentColorMap::Investigating.color()
  } else if monitoring_patt.is_match(&first_entry) {
    IncidentColorMap::Monitoring.color()
  } else if resolved_patt.is_match(&first_entry) {
    IncidentColorMap::Resolved.color()
  } else {
    IncidentColorMap::Default.color()
  };

  // First run: seed both cache keys and report nothing.
  if cached_incident.is_empty() {
    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
    redis.set(&rkey_content, &new_content).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(incident) = get_incident_id(&article.links[0].href) {
    if incident == cached_incident {
      // Same incident: only re-emit when its content actually changed.
      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
      if cached_content == new_content {
        return Ok(None);
      } else {
        redis.set(&rkey_content, &new_content).await.unwrap();
        redis.expire(&rkey_content, 21600).await.unwrap();
        return Ok(Some(embed(
          color,
          article.title.unwrap().content,
          incident_page,
          trim_old_content(&new_content),
          Timestamp::from(article.updated.unwrap())
        )));
      }
    } else {
      // New incident: remember it and emit the first embed for it.
      save_to_redis(&rkey, &incident).await?;
      redis.set(&rkey_content, &new_content).await.unwrap();
      return Ok(Some(embed(
        color,
        article.title.unwrap().content,
        incident_page,
        trim_old_content(&new_content),
        Timestamp::from(article.updated.unwrap())
      )));
    }
  } else {
    task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
    Ok(None)
  }
}

View File

@ -0,0 +1,121 @@
use super::{
task_err,
TASK_NAME,
BINARY_PROPERTIES,
get_redis,
esxi::esxi_embed,
github::github_embed,
gportal::gportal_embed,
rust::rust_message
};
use regex::Regex;
use tokio::time::{
Duration,
sleep
};
use poise::serenity_prelude::{
Context,
ChannelId,
EditMessage,
CreateMessage,
CreateEmbed,
};
// This is for building up the embed with the feed data
/* std::fs::File::create("rss_name.log").unwrap();
std::fs::write("rss_name.log", format!("{:#?}", feed))?; */
// TODO: add a reusable function for ingesting RSS data and building the embed from it;
// see github.rs / esxi.rs / gportal.rs for reference implementations of this idea.
/// Posts or updates the tracked Discord message for a status-page incident.
///
/// * No cached message ID → sends a fresh message and caches its ID with a
///   10-hour TTL.
/// * Cached message ID → edits the existing message when the content
///   changed, then — if the description mentions "resolved" — replies to it
///   and drops the cached ID so the next incident starts a new message.
///
/// `redis_key` holds the Discord message ID; `content_key` holds the last
/// posted content, used for change detection.
async fn process_embed(
  ctx: &Context,
  embed: Option<CreateEmbed>,
  redis_key: &str,
  content_key: &str
) -> Result<(), crate::Error> {
  if let Some(embed) = embed {
    let redis = get_redis().await;
    let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);

    let msg_id_key: Option<String> = redis.get(redis_key).await?;
    let cached_content: Option<String> = redis.get(content_key).await.unwrap_or(None);

    if let Some(msg_id_key) = msg_id_key {
      if let Ok(msg_id) = msg_id_key.parse::<u64>() {
        if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
          // NOTE(review): this reads the *existing* message's description and
          // compares it against the cached content; indexing embeds[0] and
          // unwrapping description panics if the message lost its embed —
          // TODO confirm that can't happen.
          let new_description = message.embeds[0].description.clone().unwrap();

          if cached_content.as_deref() != Some(&new_description) {
            message.edit(&ctx.http, EditMessage::new().embed(embed)).await?;
          }

          // NOTE(review): unconditional 15s delay before the resolved check —
          // presumably to let the edit settle/rate-limit; confirm intent.
          sleep(Duration::from_secs(15)).await;

          if Regex::new(r"(?i)\bresolved\b").unwrap().is_match(&new_description) {
            message.reply(&ctx.http, "This incident has been marked as resolved!").await?;
            redis.del(redis_key).await?;
          }
        }
      }
    } else {
      // No tracked message yet: post one and remember its ID for 10 hours.
      let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await?;
      redis.set(redis_key, &message.id.to_string()).await?;
      redis.expire(redis_key, 36000).await?;
    }
  }
  Ok(())
}
pub async fn feed_processor(ctx: &Context) {
let mut log_msgs: Vec<String> = Vec::new();
match esxi_embed().await {
Ok(Some(embed)) => {
ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
},
Ok(None) => (),
Err(y) => {
log_msgs.push(format!("**[{TASK_NAME}:ESXi:Error]:** Feed failed with the following error:```\n{}\n```", y));
task_err(&TASK_NAME, &y.to_string())
}
}
match gportal_embed().await {
Ok(Some(embed)) => process_embed(&ctx, Some(embed), "RSS_GPortal_MsgID", "RSS_GPortal_Content").await.unwrap(),
Ok(None) => (),
Err(y) => {
log_msgs.push(format!("**[{TASK_NAME}:GPortal:Error]:** Feed failed with the following error:```\n{}\n```", y));
task_err(&TASK_NAME, &y.to_string())
}
}
match github_embed().await {
Ok(Some(embed)) => process_embed(&ctx, Some(embed), "RSS_GitHub_MsgID", "RSS_GitHub_Content").await.unwrap(),
Ok(None) => (),
Err(y) => {
log_msgs.push(format!("**[{TASK_NAME}:GitHub:Error]:** Feed failed with the following error:```\n{}\n```", y));
task_err(&TASK_NAME, &y.to_string())
}
}
match rust_message().await {
Ok(Some(content)) => {
ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
},
Ok(None) => (),
Err(y) => {
log_msgs.push(format!("**[{TASK_NAME}:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```", y));
task_err(&TASK_NAME, &y.to_string())
}
}
if !log_msgs.is_empty() {
ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
&ctx.http, CreateMessage::new().content(log_msgs.join("\n"))
).await.unwrap();
}
}

View File

@ -0,0 +1,53 @@
use crate::Error;
use super::{
task_err,
REDIS_EXPIRY_SECS,
get_redis,
save_to_redis,
fetch_feed,
parse
};
use std::io::Cursor;
use regex::Regex;
/// Checks the Rust blog feed and returns an announcement message when a new
/// article is published.
///
/// Returns `Ok(None)` when there is nothing new: first run (cache is
/// seeded), same article, or an article URL that doesn't match the expected
/// pattern.
pub async fn rust_message() -> Result<Option<String>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_RustBlog";
  let url = "https://blog.rust-lang.org/feed.xml";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  // NOTE(review): `parse` and entries[0] assume a well-formed, non-empty
  // feed; a malformed response panics here.
  let feed = parse(cursor).unwrap();
  let article = feed.entries[0].clone();

  // Extracts the "YYYY/MM/DD/slug" path from a Rust blog article URL.
  // Borrows `&str` instead of taking `String` by value, so the article ID
  // no longer needs a defensive clone for the error message below.
  fn get_blog_title(input: &str) -> Option<String> {
    let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
    re.captures(input).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
  }

  // First run: seed the cache and report nothing.
  let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();
  if cached_blog.is_empty() {
    redis.set(&rkey, get_blog_title(&article.id).unwrap().as_str()).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(blog) = get_blog_title(&article.id) {
    if blog == cached_blog {
      Ok(None)
    } else {
      save_to_redis(&rkey, &blog).await?;
      // NOTE(review): `links[0].title` is unwrapped; a feed entry without a
      // link title would panic here — TODO confirm the feed always sets it.
      Ok(Some(format!("Rust Team has put out a new article!\n**[{}](<{}>)**", article.links[0].title.clone().unwrap(), article.links[0].href)))
    }
  } else {
    task_err("RSS:RustBlog", &format!("Article URL does not match the expected RegEx pattern! ({})", article.id));
    Ok(None)
  }
}

View File

@ -1,19 +1,23 @@
use tokenservice_client::{TokenService, TokenServiceApi}; use tokenservice_client::{
TokenService,
TokenServiceApi
};
pub struct TSClient { pub struct TSClient(TokenService);
client: TokenService
}
impl TSClient { impl TSClient {
pub fn new() -> Self { pub fn new() -> Self {
let args: Vec<String> = std::env::args().collect(); let args: Vec<String> = std::env::args().collect();
let service = if args.len() > 1 { args[1].as_str() } else { "kon" }; let service = if args.len() > 1 { &args[1] } else { "kon" };
TSClient { Self(TokenService::new(service))
client: TokenService::new(service)
} }
}
pub async fn get(&self) -> Result<TokenServiceApi, Box<dyn std::error::Error>> { pub async fn get(&self) -> Result<TokenServiceApi, crate::Error> {
let api = self.client.connect().await.unwrap(); match self.0.connect().await {
Ok(api) => {
Ok(api) Ok(api)
} }
Err(e) => Err(e)
}
}
} }

View File

@ -1,20 +1,42 @@
use once_cell::sync::Lazy; use poise::serenity_prelude::UserId;
use std::sync::LazyLock;
use tokio::sync::Mutex;
use tokenservice_client::TokenServiceApi; use tokenservice_client::TokenServiceApi;
use super::tsclient::TSClient;
pub static EMBED_COLOR: i32 = 0x5a99c7; pub static BOT_VERSION: LazyLock<String> = LazyLock::new(|| {
let cargo_version = cargo_toml::Manifest::from_str(&include_str!("../../Cargo.toml"))
pub static BOT_VERSION: Lazy<String> = Lazy::new(|| { .unwrap()
let cargo_version = cargo_toml::Manifest::from_path("Cargo.toml").unwrap().package.unwrap().version.unwrap(); .package
.unwrap()
.version
.unwrap();
format!("v{}", cargo_version) format!("v{}", cargo_version)
}); });
static TSCLIENT: LazyLock<Mutex<TSClient>> = LazyLock::new(|| Mutex::new(TSClient::new()));
pub async fn token_path() -> TokenServiceApi { pub async fn token_path() -> TokenServiceApi {
let client = super::tsclient::TSClient::new().get().await.unwrap(); TSCLIENT.lock().await.get().await.unwrap()
client
} }
pub fn concat_message(messages: Vec<String>) -> String { pub fn mention_dev(ctx: poise::Context<'_, (), crate::Error>) -> Option<String> {
messages.join("\n") let devs = super::config::BINARY_PROPERTIES.developers.clone();
let app_owners = ctx.framework().options().owners.clone();
let mut mentions = Vec::new();
for dev in devs {
if app_owners.contains(&UserId::new(dev)) {
mentions.push(format!("<@{}>", dev));
}
}
if mentions.is_empty() {
None
} else {
Some(mentions.join(", "))
}
} }
pub fn format_duration(secs: u64) -> String { pub fn format_duration(secs: u64) -> String {
@ -37,3 +59,24 @@ pub fn format_duration(secs: u64) -> String {
formatted_string formatted_string
} }
pub fn format_bytes(bytes: u64) -> String {
let units = ["B", "KB", "MB", "GB", "TB", "PB"];
let mut value = bytes as f64;
let mut unit = units[0];
for &u in &units[1..] {
if value < 1024.0 {
break;
}
value /= 1024.0;
unit = u;
}
if unit == "B" {
format!("{}{}", value, unit)
} else {
format!("{:.2}{}", value, unit)
}
}

View File

@ -1,11 +1,25 @@
mod commands; mod commands;
mod controllers; mod controllers;
mod models;
mod internals; mod internals;
// https://cdn.toast-server.net/RustFSHiearchy.png
// Using the new filesystem hierarchy
use crate::internals::{
utils::{
BOT_VERSION,
token_path,
mention_dev
},
tasks::{
run_task,
rss
},
config::BINARY_PROPERTIES
};
use std::{ use std::{
env::var, sync::Arc,
error thread::current
}; };
use poise::serenity_prelude::{ use poise::serenity_prelude::{
builder::{ builder::{
@ -13,45 +27,65 @@ use poise::serenity_prelude::{
CreateEmbed, CreateEmbed,
CreateEmbedAuthor CreateEmbedAuthor
}, },
Context,
Ready, Ready,
Context,
FullEvent,
ClientBuilder, ClientBuilder,
ChannelId, ChannelId,
Command,
GatewayIntents GatewayIntents
}; };
type Error = Box<dyn error::Error + Send + Sync>; type Error = Box<dyn std::error::Error + Send + Sync>;
static BOT_READY_NOTIFY: u64 = 865673694184996888; #[cfg(feature = "production")]
pub static GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
pub static GIT_COMMIT_BRANCH: &str = env!("GIT_COMMIT_BRANCH");
#[cfg(not(feature = "production"))]
pub static GIT_COMMIT_HASH: &str = "devel";
async fn on_ready( async fn on_ready(
ctx: &Context, ctx: &Context,
ready: &Ready, ready: &Ready,
framework: &poise::Framework<(), Error> _framework: &poise::Framework<(), Error>
) -> Result<(), Error> { ) -> Result<(), Error> {
println!("Connected to API as {}", ready.user.name); #[cfg(not(feature = "production"))]
{
println!("Event[Ready][Notice]: Detected a non-production environment!");
let gateway = ctx.http.get_bot_gateway().await?;
let session = gateway.session_start_limit;
println!("Event[Ready][Notice]: Session limit: {}/{}", session.remaining, session.total);
}
println!("Event[Ready]: Build version: {} ({}:{})", BOT_VERSION.to_string(), GIT_COMMIT_HASH, GIT_COMMIT_BRANCH);
println!("Event[Ready]: Connected to API as {}", ready.user.name);
let message = CreateMessage::new(); let message = CreateMessage::new();
let ready_embed = CreateEmbed::new() let ready_embed = CreateEmbed::new()
.color(internals::utils::EMBED_COLOR) .color(BINARY_PROPERTIES.embed_color)
.thumbnail(ready.user.avatar_url().unwrap_or_default()) .thumbnail(ready.user.avatar_url().unwrap_or_default())
.author(CreateEmbedAuthor::new(format!("{} is ready!", ready.user.name)).clone()); .author(CreateEmbedAuthor::new(format!("{} is ready!", ready.user.name)));
ChannelId::new(BOT_READY_NOTIFY).send_message(&ctx.http, message.add_embed(ready_embed)).await?; ChannelId::new(BINARY_PROPERTIES.ready_notify).send_message(&ctx.http, message.add_embed(ready_embed)).await?;
let register_commands = var("REGISTER_CMDS").unwrap_or_else(|_| String::from("true")).parse::<bool>().unwrap_or(true); Ok(())
}
if register_commands { async fn event_processor(
let builder = poise::builtins::create_application_commands(&framework.options().commands); ctx: &Context,
let commands = Command::set_global_commands(&ctx.http, builder).await; event: &FullEvent,
_framework: poise::FrameworkContext<'_, (), Error>
) -> Result<(), Error> {
match event {
FullEvent::Ready { .. } => {
let thread_id = format!("{:?}", current().id());
let thread_num: String = thread_id.chars().filter(|c| c.is_digit(10)).collect();
println!("Event[Ready]: Task Scheduler operating on thread {}", thread_num);
match commands { let ctx = Arc::new(ctx.clone());
Ok(cmdmap) => for command in cmdmap.iter() { run_task(ctx.clone(), rss).await;
println!("Registered command globally: {}", command.name);
},
Err(why) => println!("Error registering commands: {:?}", why)
} }
_ => {}
} }
Ok(()) Ok(())
@ -59,47 +93,67 @@ async fn on_ready(
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let db = controllers::database::DatabaseController::new().await.expect("Failed to connect to database");
let framework = poise::Framework::builder() let framework = poise::Framework::builder()
.options(poise::FrameworkOptions { .options(poise::FrameworkOptions {
commands: vec![ commands: vec![
commands::deploy(),
commands::ilo::ilo(),
commands::ping::ping(), commands::ping::ping(),
commands::uptime::uptime(),
commands::status::status(), commands::status::status(),
commands::gameserver::gameserver() commands::midi::midi_to_wav(),
commands::uptime::uptime()
], ],
prefix_options: poise::PrefixFrameworkOptions {
prefix: Some(String::from("konata")),
mention_as_prefix: false,
case_insensitive_commands: true,
ignore_bots: true,
ignore_thread_creation: true,
..Default::default()
},
pre_command: |ctx| Box::pin(async move { pre_command: |ctx| Box::pin(async move {
let get_guild_name = match ctx.guild() { let get_guild_name = match ctx.guild() {
Some(guild) => guild.name.clone(), Some(guild) => guild.name.clone(),
None => String::from("DM") None => String::from("Direct Message")
}; };
println!("[{}] {} ran /{}", get_guild_name, ctx.author().name, ctx.command().qualified_name) println!("Discord[{}]: {} ran /{}", get_guild_name, ctx.author().name, ctx.command().qualified_name);
}), }),
on_error: |error| Box::pin(async move { on_error: |error| Box::pin(async move {
match error { match error {
poise::FrameworkError::Command { error, ctx, .. } => { poise::FrameworkError::Command { error, ctx, .. } => {
println!("PoiseCommandError({}): {}", ctx.command().qualified_name, error); println!("PoiseCommandError({}): {}", ctx.command().qualified_name, error);
} ctx.reply(format!(
"Encountered an error during command execution, ask {} to check console for more details!",
mention_dev(ctx).unwrap_or_default()
)).await.expect("Error sending message");
},
poise::FrameworkError::EventHandler { error, event, .. } => println!("PoiseEventHandlerError({}): {}", event.snake_case_name(), error),
poise::FrameworkError::Setup { error, .. } => println!("PoiseSetupError: {}", error),
poise::FrameworkError::UnknownInteraction { interaction, .. } => println!(
"PoiseUnknownInteractionError: {} tried to execute an unknown interaction ({})",
interaction.user.name,
interaction.data.name
),
other => println!("PoiseOtherError: {}", other) other => println!("PoiseOtherError: {}", other)
} }
}), }),
initialize_owners: true, initialize_owners: true,
event_handler: |ctx, event, framework, _| Box::pin(event_processor(ctx, event, framework)),
..Default::default() ..Default::default()
}) })
.setup(|ctx, ready, framework| Box::pin(on_ready(ctx, ready, framework))) .setup(|ctx, ready, framework| Box::pin(on_ready(ctx, ready, framework)))
.build(); .build();
let mut client = ClientBuilder::new(internals::utils::token_path().await.main, GatewayIntents::GUILDS) let mut client = ClientBuilder::new(
token_path().await.main,
GatewayIntents::GUILDS
| GatewayIntents::GUILD_MESSAGES
| GatewayIntents::MESSAGE_CONTENT
)
.framework(framework) .framework(framework)
.await.expect("Error creating client"); .await.expect("Error creating client");
{
let mut data = client.data.write().await;
data.insert::<controllers::database::DatabaseController>(db);
}
if let Err(why) = client.start().await { if let Err(why) = client.start().await {
println!("Client error: {:?}", why); println!("Error starting client: {:#?}", why);
} }
} }

View File

@ -1,103 +0,0 @@
use crate::controllers::database::DatabaseController;
/// A row in the `gameservers` table: one game server registered to a guild.
pub struct Gameservers {
  pub server_name: String,
  pub game_name: String,
  // Discord guild that owns this entry; i64 to match Postgres BIGINT.
  pub guild_owner: i64,
  pub ip_address: String
}

impl Gameservers {
  /// Fetches every server registered under the given guild.
  pub async fn list_servers(guild_id: u64) -> Result<Vec<Self>, tokio_postgres::Error> {
    // NOTE(review): every method here opens a fresh DB connection per call —
    // consider a shared pool if call volume grows.
    let client = DatabaseController::new().await?.client;
    let rows = client.query("
      SELECT * FROM gameservers
      WHERE guild_owner = $1
    ", &[&(guild_id as i64)]).await?;

    let mut servers = Vec::new();
    for row in rows {
      servers.push(Self {
        server_name: row.get("server_name"),
        game_name: row.get("game_name"),
        guild_owner: row.get("guild_owner"),
        ip_address: row.get("ip_address")
      });
    }
    Ok(servers)
  }

  /// Inserts a new server row for the guild.
  pub async fn add_server(
    guild_id: u64,
    server_name: &str,
    game_name: &str,
    ip_address: &str
  ) -> Result<(), tokio_postgres::Error> {
    let client = DatabaseController::new().await?.client;
    client.execute("
      INSERT INTO gameservers (server_name, game_name, guild_owner, ip_address)
      VALUES ($1, $2, $3, $4)
    ", &[&server_name, &game_name, &(guild_id as i64), &ip_address]).await?;

    Ok(())
  }

  /// Deletes the named server belonging to the guild.
  pub async fn remove_server(guild_id: u64, server_name: &str) -> Result<(), tokio_postgres::Error> {
    let client = DatabaseController::new().await?.client;
    client.execute("
      DELETE FROM gameservers
      WHERE guild_owner = $1 AND server_name = $2
    ", &[&(guild_id as i64), &server_name]).await?;

    Ok(())
  }

  /// Updates the game name and IP of the named server belonging to the guild.
  pub async fn update_server(
    guild_id: u64,
    server_name: &str,
    game_name: &str,
    ip_address: &str
  ) -> Result<(), tokio_postgres::Error> {
    let client = DatabaseController::new().await?.client;
    client.execute("
      UPDATE gameservers
      SET game_name = $1, ip_address = $2
      WHERE guild_owner = $3 AND server_name = $4
    ", &[&game_name, &ip_address, &(guild_id as i64), &server_name]).await?;

    Ok(())
  }

  /// Returns just the server names registered under the guild.
  pub async fn get_server_names(guild_id: u64) -> Result<Vec<String>, tokio_postgres::Error> {
    let client = DatabaseController::new().await?.client;
    let rows = client.query("
      SELECT server_name FROM gameservers
      WHERE guild_owner = $1
    ", &[&(guild_id as i64)]).await?;

    let mut servers = Vec::new();
    for row in rows {
      servers.push(row.get("server_name"));
    }
    Ok(servers)
  }

  /// Returns [name, game, ip] for the named server belonging to the guild.
  // NOTE(review): the result is a positional Vec<String>, not a struct —
  // callers must know the field order; empty when no row matches.
  pub async fn get_server_data(guild_id: u64, server_name: &str) -> Result<Vec<String>, tokio_postgres::Error> {
    let client = DatabaseController::new().await?.client;
    let rows = client.query("
      SELECT * FROM gameservers
      WHERE guild_owner = $1 AND server_name = $2
    ", &[&(guild_id as i64), &server_name]).await?;

    let mut server = Vec::new();
    for row in rows {
      server.push(row.get("server_name"));
      server.push(row.get("game_name"));
      server.push(row.get("ip_address"))
    }
    Ok(server)
  }
}

View File

@ -1 +0,0 @@
pub mod gameservers;