Compare commits


10 Commits

SHA1        Message                                       CI result             Date
31616613f3  Put Soundfont file onto LFS                   cancelled             2024-08-01 14:25:01 +10:00
56f5fd2bf9  Add more iLO data                             successful in 16m26s  2024-07-31 12:21:26 +10:00
081ff781de  Use defer in reply                            successful in 15m59s  2024-07-30 19:25:51 +10:00
1f74db5346  Add iLO command                               successful in 15m41s  2024-07-30 19:00:31 +10:00
3700fe34f0  Add bold markdown to the Rust blog message    successful in 15m32s  2024-07-27 23:44:17 -04:00
003d17a15f  [no ci] It got caught by the builder          -                     2024-07-28 10:42:42 +10:00
7047ada6a2  Add RSS implementation                        successful in 15m2s   2024-07-28 09:54:20 +10:00
de13b472bb  Use the /etc/os-release instead for OS info   successful in 14m53s  2024-07-25 15:42:10 +10:00
f936c29814  Update midi.rs                                -                     2024-07-25 15:39:15 +10:00
890550c6ef  Add conversion command for midi files         successful in 17m19s  2024-07-22 16:41:20 +10:00

All CI results refer to the "Build and push container image / build (push)" workflow; "-" means no run was recorded for that commit.
22 changed files with 1363 additions and 289 deletions

.gitattributes (vendored, new file, 1 line)

src/internals/assets/FluidR3_GM.sf2 filter=lfs diff=lfs merge=lfs -text

.gitignore (vendored)

@@ -1,2 +1,3 @@
 target
 .env
+*.log

Cargo.lock (generated, 597 changed lines): diff suppressed because it is too large.

Cargo.toml

@@ -1,22 +1,25 @@
 [package]
 name = "kon"
-version = "0.3.0"
+version = "0.3.7"
 edition = "2021"

 [dependencies]
 bb8 = "0.8.5"
 bb8-postgres = "0.8.1"
+bb8-redis = "0.15.0"
 cargo_toml = "0.20.4"
+feed-rs = "2.1.0"
 once_cell = "1.19.0"
 os_info = "3.8.2"
 poise = "0.6.1"
+regex = "1.10.5"
 reqwest = { version = "0.12.5", features = ["json"] }
 serde = "1.0.204"
 serde_json = "1.0.120"
 sysinfo = "0.30.13"
-tokenservice-client = { version = "0.3.2", registry = "gitea" }
+tokenservice-client = { version = "0.3.3", registry = "gitea" }
-tokio = { version = "1.38.1", features = ["macros", "signal", "rt-multi-thread"] }
+tokio = { version = "1.39.2", features = ["macros", "signal", "rt-multi-thread"] }
-tokio-postgres = "0.7.10"
+tokio-postgres = "0.7.11"
 uptime_lib = "0.3.1"

 [features]

Dockerfile

@@ -1,6 +1,5 @@
-FROM rust:1.79-alpine3.20 AS chef
+FROM rust:1.80-alpine3.20 AS chef
 ENV RUSTFLAGS="-C target-feature=-crt-static"
-ARG CARGO_TOKEN
 RUN apk add --no-cache openssl-dev musl-dev
 RUN cargo install cargo-chef
 WORKDIR /usr/src/kon

@@ -8,17 +7,21 @@ WORKDIR /usr/src/kon
 FROM chef AS planner
 COPY . .
 RUN mkdir -p .cargo && \
-  printf '[registries.gitea]\nindex = "sparse+https://git.toast-server.net/api/packages/toast/cargo/"\ntoken = "Bearer %s"\n' "$CARGO_TOKEN" >> .cargo/config.toml
+  printf '[registries.gitea]\nindex = "sparse+https://git.toast-server.net/api/packages/toast/cargo/"' >> .cargo/config.toml
 RUN cargo chef prepare

-FROM chef AS builder
+FROM chef AS dependencies
 COPY --from=planner /usr/src/kon/recipe.json recipe.json
 RUN cargo chef cook --release
+
+FROM chef AS builder
+COPY --from=planner /usr/src/kon/.cargo /usr/src/kon/.cargo
+COPY --from=dependencies /usr/src/kon/target /usr/src/kon/target
 COPY . .
 RUN cargo build -rF production

 FROM alpine:3.20
-RUN apk add --no-cache libgcc
+RUN apk add --no-cache libgcc fluidsynth
 WORKDIR /kon
 COPY --from=builder /usr/src/kon/target/release/kon .
 CMD [ "./kon" ]

docker-compose.yml

@@ -1,11 +1,18 @@
 services:
   bot:
     container_name: kon
-    #image: 'git.toast-server.net/toast/kon:main'
+    #image: git.toast-server.net/toast/kon:main
     build: .
     restart: unless-stopped
-    # depends_on:
-    #   - db
+    depends_on:
+      - cache
+
+  cache:
+    container_name: kon-redis
+    image: redis/redis-stack-server:7.4.0-rc2
+    restart: unless-stopped
+    ports:
+      - 37935:6379/tcp
 # db:
 #   container_name: kon-database
 #   image: postgres:16.2-alpine3.19@sha256:951bfda460300925caa3949eaa092ba022e9aec191bbea9056a39e2382260b27

src/commands/mod.rs

@@ -1,3 +1,5 @@
+pub mod ilo;
+pub mod midi;
 pub mod ping;
 pub mod status;
 pub mod uptime;

src/commands/ilo.rs (new file, 295 lines)

use crate::{
  Error,
  internals::{
    config::BINARY_PROPERTIES,
    utils::token_path
  }
};

use reqwest::{
  ClientBuilder,
  Error as ReqError
};

use serde::{
  Serialize,
  Deserialize
};

use poise::{
  CreateReply,
  serenity_prelude::{
    CreateEmbed,
    Timestamp
  }
};

#[derive(Serialize, Deserialize)]
struct Chassis {
  #[serde(rename = "Fans")]
  fans: Vec<Fan>,
  #[serde(rename = "Temperatures")]
  temperatures: Vec<Temperature>
}

#[derive(Serialize, Deserialize)]
struct Fan {
  #[serde(rename = "CurrentReading")]
  current_reading: i32,
  #[serde(rename = "FanName")]
  fan_name: String,
  #[serde(rename = "Status")]
  status: Status,
}

#[derive(Serialize, Deserialize)]
struct Temperature {
  #[serde(rename = "CurrentReading")]
  current_reading: i32,
  #[serde(rename = "Name")]
  name: String,
  #[serde(rename = "ReadingCelsius")]
  reading_celsius: i32,
  #[serde(rename = "Status")]
  status: Status,
  #[serde(rename = "Units")]
  units: String,
  #[serde(rename = "UpperThresholdCritical")]
  upper_threshold_critical: i32,
  #[serde(rename = "UpperThresholdFatal")]
  upper_threshold_fatal: i32
}

#[derive(Serialize, Deserialize)]
struct Status {
  #[serde(rename = "Health")]
  health: Option<String>,
  #[serde(rename = "State")]
  state: String
}

#[derive(Serialize, Deserialize, Debug)]
struct Power {
  #[serde(rename = "PowerCapacityWatts")]
  power_capacity_watts: i32,
  #[serde(rename = "PowerConsumedWatts")]
  power_consumed_watts: i32,
  #[serde(rename = "PowerMetrics")]
  power_metrics: PowerMetrics
}

#[derive(Serialize, Deserialize, Debug)]
struct PowerMetrics {
  #[serde(rename = "AverageConsumedWatts")]
  average_consumed_watts: i32,
  #[serde(rename = "MaxConsumedWatts")]
  max_consumed_watts: i32,
  #[serde(rename = "MinConsumedWatts")]
  min_consumed_watts: i32
}

#[derive(Serialize, Deserialize)]
struct System {
  #[serde(rename = "Memory")]
  memory: Memory,
  #[serde(rename = "Model")]
  model: String,
  #[serde(rename = "Oem")]
  oem: Oem,
  #[serde(rename = "PowerState")]
  power_state: String,
  #[serde(rename = "ProcessorSummary")]
  processor_summary: ProcessorSummary
}

#[derive(Serialize, Deserialize)]
struct Memory {
  #[serde(rename = "TotalSystemMemoryGB")]
  total_system_memory: i32
}

#[derive(Serialize, Deserialize)]
struct ProcessorSummary {
  #[serde(rename = "Count")]
  count: i32,
  #[serde(rename = "Model")]
  cpu: String
}

#[derive(Serialize, Deserialize)]
struct Oem {
  #[serde(rename = "Hp")]
  hp: Hp
}

#[derive(Serialize, Deserialize)]
struct Hp {
  #[serde(rename = "PostState")]
  post_state: String
}

const ILO_HOSTNAME: &str = "POMNI";

enum RedfishEndpoint {
  Thermal,
  Power,
  System
}

impl RedfishEndpoint {
  fn url(&self) -> String {
    match self {
      RedfishEndpoint::Thermal => "Chassis/1/Thermal".to_string(),
      RedfishEndpoint::Power => "Chassis/1/Power".to_string(),
      RedfishEndpoint::System => "Systems/1".to_string()
    }
  }
}

async fn ilo_data(endpoint: RedfishEndpoint) -> Result<Box<dyn std::any::Any + Send>, ReqError> {
  let client = ClientBuilder::new()
    .danger_accept_invalid_certs(true)
    .build()
    .unwrap();
  let res = client
    .get(format!("https://{}/redfish/v1/{}", token_path().await.ilo_ip, endpoint.url()))
    .basic_auth(token_path().await.ilo_user, Some(token_path().await.ilo_pw))
    .send()
    .await
    .unwrap();

  match endpoint {
    RedfishEndpoint::Thermal => {
      let body: Chassis = res.json().await.unwrap();
      Ok(Box::new(body))
    }
    RedfishEndpoint::Power => {
      let body: Power = res.json().await.unwrap();
      Ok(Box::new(body))
    }
    RedfishEndpoint::System => {
      let body: System = res.json().await.unwrap();
      Ok(Box::new(body))
    }
  }
}

/// Retrieve data from the HP iLO4 interface
#[poise::command(
  slash_command,
  subcommands("temperature", "power", "system")
)]
pub async fn ilo(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
  Ok(())
}

/// Retrieve the server's temperature data
#[poise::command(slash_command)]
pub async fn temperature(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  ctx.defer().await?;
  let ilo = ilo_data(RedfishEndpoint::Thermal).await.unwrap();
  let data = ilo.downcast_ref::<Chassis>().unwrap();
  let mut tempdata = String::new();
  let mut fandata = String::new();

  let allowed_sensors = [
    "01-Inlet Ambient",
    "04-P1 DIMM 1-6",
    "14-Chipset Zone"
  ];

  for temp in &data.temperatures {
    if temp.reading_celsius == 0 || !allowed_sensors.contains(&temp.name.as_str()) {
      continue;
    }

    let name = match temp.name.as_str() {
      "01-Inlet Ambient" => "Inlet Ambient",
      "04-P1 DIMM 1-6" => "P1 DIMM 1-6",
      "14-Chipset Zone" => "Chipset Zone",
      _ => "Unknown Sensor"
    };

    tempdata.push_str(&format!("**{}:** `{}°C`\n", name, temp.reading_celsius));
  }

  for fan in &data.fans {
    if fan.current_reading == 0 {
      continue;
    }
    fandata.push_str(&format!("**{}:** `{}%`\n", fan.fan_name, fan.current_reading));
  }

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - Temperatures", ILO_HOSTNAME))
      .fields(vec![
        ("Temperatures", tempdata, false),
        ("Fans", fandata, false)
      ])
  )).await?;

  Ok(())
}

/// Retrieve the server's power data
#[poise::command(slash_command)]
pub async fn power(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  ctx.defer().await?;
  let ilo = ilo_data(RedfishEndpoint::Power).await.unwrap();
  let data = ilo.downcast_ref::<Power>().unwrap();
  let mut powerdata = String::new();

  powerdata.push_str(&format!("**Power Capacity:** `{}w`\n", &data.power_capacity_watts));
  powerdata.push_str(&format!("**Power Consumed:** `{}w`\n", &data.power_consumed_watts));
  powerdata.push_str(&format!("**Average Power:** `{}w`\n", &data.power_metrics.average_consumed_watts));
  powerdata.push_str(&format!("**Max Consumed:** `{}w`\n", &data.power_metrics.max_consumed_watts));
  powerdata.push_str(&format!("**Min Consumed:** `{}w`", &data.power_metrics.min_consumed_watts));

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - Power", ILO_HOSTNAME))
      .description(powerdata)
  )).await?;

  Ok(())
}

/// Retrieve the server's system data
#[poise::command(slash_command)]
pub async fn system(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
  ctx.defer().await?;
  let ilo = ilo_data(RedfishEndpoint::System).await.unwrap();
  let data = ilo.downcast_ref::<System>().unwrap();
  let mut bios_data = String::new();

  let post_state = match data.oem.hp.post_state.as_str() {
    "FinishedPost" => "Finished POST",
    _ => "???"
  };
  if data.oem.hp.post_state != "FinishedPost" {
    println!("iLO:PostState = {}", data.oem.hp.post_state);
  }

  bios_data.push_str(&format!("**POST:** `{}`\n", post_state));
  bios_data.push_str(&format!("**Power:** `{}`\n", &data.power_state));
  bios_data.push_str(&format!("**Model:** `{}`", &data.model));

  ctx.send(CreateReply::default().embed(
    CreateEmbed::new()
      .color(BINARY_PROPERTIES.embed_color)
      .timestamp(Timestamp::now())
      .title(format!("{} - System", ILO_HOSTNAME))
      .description(bios_data)
      .fields(vec![
        (format!("CPU ({}x)", data.processor_summary.count), data.processor_summary.cpu.trim().to_string(), true),
        ("RAM".to_string(), format!("{} GB", data.memory.total_system_memory), true)
      ])
  )).await?;

  Ok(())
}
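
Note on the file above: ilo_data returns Box<dyn std::any::Any + Send> so one fetch path can carry three different response shapes, and each subcommand downcasts back to its concrete struct. A minimal, self-contained sketch of that round-trip (the Reading struct and make_reading are hypothetical names, not from this commit):

use std::any::Any;

struct Reading { celsius: i32 }

// Returns a type-erased value, as ilo_data does
fn make_reading() -> Box<dyn Any + Send> {
  Box::new(Reading { celsius: 42 })
}

fn main() {
  let boxed = make_reading();
  // downcast_ref::<T>() yields Some only if the boxed value really is a T
  match boxed.downcast_ref::<Reading>() {
    Some(r) => println!("{}°C", r.celsius),
    None => eprintln!("not a Reading")
  }
}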

src/commands/midi.rs (new file, 101 lines)

use crate::{
  Error,
  internals::utils::{
    mention_dev,
    format_bytes
  }
};

use regex::Regex;
use std::{
  os::unix::fs::MetadataExt,
  fs::{
    write,
    remove_file,
    metadata
  }
};
use poise::{
  CreateReply,
  serenity_prelude::CreateAttachment
};

/// Convert MIDI file to WAV
#[poise::command(context_menu_command = "MIDI -> WAV")]
pub async fn midi_to_wav(
  ctx: poise::Context<'_, (), Error>,
  #[description = "MIDI file to be converted"] message: poise::serenity_prelude::Message
) -> Result<(), Error> {
  let re = Regex::new(r"(?i)\.mid$").unwrap();

  if !message.embeds.is_empty() || message.attachments.is_empty() || !re.is_match(&message.attachments[0].filename) {
    ctx.reply("That ain't a MIDI file! What are you even doing??").await?;
    return Ok(());
  }

  ctx.defer().await?;

  let bytes = match message.attachments[0].download().await {
    Ok(bytes) => bytes,
    Err(y) => {
      ctx.send(CreateReply::default()
        .content(format!(
          "Download failed, ask {} to check console for more information!",
          mention_dev(ctx).unwrap_or_default()
        ))
      )
      .await.unwrap();

      return Err(Error::from(format!("Failed to download the file: {}", y)))
    }
  };

  let midi_path = &message.attachments[0].filename;
  write(midi_path, bytes)?;

  let wav_path = re.replace(&midi_path, ".wav");

  let sf2_path = "/tmp/FluidR3_GM.sf2";
  write(sf2_path, include_bytes!("../internals/assets/FluidR3_GM.sf2"))?;

  let output = std::process::Command::new("fluidsynth")
    .args(&["-ni", sf2_path, midi_path, "-F", &wav_path])
    .output();

  // Just to add an info to console to tell what the bot is doing when MIDI file is downloaded.
  println!("Discord[{}:{}]: Processing MIDI file: \"{}\"", ctx.guild().unwrap().name, ctx.command().qualified_name, midi_path);

  match output {
    Ok(_) => {
      let reply = ctx.send(CreateReply::default()
        .attachment(CreateAttachment::path(&*wav_path).await.unwrap())
      ).await;

      if reply.is_err() {
        println!(
          "Discord[{}:{}]: Processed file couldn't be uploaded back to Discord channel due to upload limit",
          ctx.guild().unwrap().name, ctx.command().qualified_name
        );

        ctx.send(CreateReply::default()
          .content(format!(
            "Couldn't upload the processed file (`{}`, `{}`) due to upload limit",
            &*wav_path, format_bytes(metadata(&*wav_path).unwrap().size())
          ))
        ).await.unwrap();
      } else if reply.is_ok() {
        remove_file(midi_path)?;
        remove_file(&*wav_path)?;
      }
    },
    Err(y) => {
      ctx.send(CreateReply::default()
        .content("Command didn't execute successfully, check console for more information!")
      ).await.unwrap();

      return Err(Error::from(format!("Midi conversion failed: {}", y)))
    }
  }

  Ok(())
}
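
The (?i)\.mid$ regex above does double duty: it gates the command to .mid attachments (case-insensitively) and derives the output filename. A small standalone check of that behavior, assuming only the regex crate:

use regex::Regex;

fn main() {
  let re = Regex::new(r"(?i)\.mid$").unwrap();

  // Case-insensitive extension check, as in midi_to_wav
  assert!(re.is_match("track.MID"));

  // replace swaps the matched extension for ".wav"
  assert_eq!(re.replace("track.MID", ".wav"), "track.wav");

  // Filenames that don't match pass through unchanged
  assert_eq!(re.replace("track.flac", ".wav"), "track.flac");
}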

src/commands/status.rs

@@ -69,8 +69,7 @@ fn process_pms_statuses(servers: Vec<(String, Vec<Value>)>) -> Vec<(String, Stri
 /// Query the server statuses
 #[poise::command(
   slash_command,
-  subcommands("wg"),
-  subcommand_required
+  subcommands("wg")
 )]
 pub async fn status(_: poise::Context<'_, (), Error>) -> Result<(), Error> {
   Ok(())

src/commands/uptime.rs

@@ -9,12 +9,43 @@ use crate::{
 use sysinfo::System;
 use uptime_lib::get;
-use std::time::{
-  Duration,
-  SystemTime,
-  UNIX_EPOCH
+use std::{
+  fs::File,
+  path::Path,
+  time::{
+    Duration,
+    SystemTime,
+    UNIX_EPOCH
+  },
+  io::{
+    BufRead,
+    BufReader
+  }
 };

+fn get_os_info() -> String {
+  let path = Path::new("/etc/os-release");
+  let mut name = "BoringOS".to_string();
+  let mut version = "v0.0".to_string();
+
+  if let Ok(file) = File::open(&path) {
+    let reader = BufReader::new(file);
+    for line in reader.lines() {
+      if let Ok(line) = line {
+        if line.starts_with("NAME=") {
+          name = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
+        } else if line.starts_with("VERSION=") {
+          version = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
+        } else if line.starts_with("VERSION_ID=") {
+          version = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
+        }
+      }
+    }
+  }
+
+  format!("{} {}", name, version)
+}
+
 /// Retrieve host and bot uptimes
 #[poise::command(slash_command)]
 pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {

@@ -22,13 +53,12 @@ pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
   let mut sys = System::new_all();
   sys.refresh_all();

-  // Fetch system's operating system
-  let sys_os_info = os_info::get();
-  let sys_os = format!("{} {}", sys_os_info.os_type(), sys_os_info.version());
-
   // Fetch system's uptime
   let sys_uptime = get().unwrap().as_secs();

+  // Fetch system's processor
+  let cpu = sys.cpus();
+
   // Fetch bot's process uptime
   let curr_pid = sysinfo::get_current_pid().unwrap();
   let now = SystemTime::now();

@@ -42,7 +72,8 @@ pub async fn uptime(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> {
     format!("**{} {}**", _bot.name, BOT_VERSION.as_str()),
     format!(">>> System: `{}`", format_duration(sys_uptime)),
     format!("Process: `{}`", format_duration(proc_uptime)),
-    format!("OS: `{}`", sys_os)
+    format!("CPU: `{}`", format!("{}", cpu[0].brand())),
+    format!("OS: `{}`", get_os_info())
   ];

   ctx.reply(concat_message(stat_msg)).await?;
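
For reference, a sketch of what get_os_info extracts from an /etc/os-release in the style of the bot's Alpine base image (the file contents here are illustrative, not captured from the container):

fn main() {
  // Hypothetical sample; real values come from the running system
  let os_release = "NAME=\"Alpine Linux\"\nID=alpine\nVERSION_ID=3.20.2\nPRETTY_NAME=\"Alpine Linux v3.20\"";

  let mut name = "BoringOS".to_string();
  let mut version = "v0.0".to_string();

  for line in os_release.lines() {
    if line.starts_with("NAME=") {
      name = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
    } else if line.starts_with("VERSION=") || line.starts_with("VERSION_ID=") {
      version = line.split('=').nth(1).unwrap_or_default().trim_matches('"').to_string();
    }
  }

  // "PRETTY_NAME=" is skipped because it doesn't start with "NAME="
  assert_eq!(format!("{} {}", name, version), "Alpine Linux 3.20.2");
}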

src/controllers/mod.rs

@@ -1 +1,2 @@
+pub mod cache;
 // pub mod database;

src/controllers/cache.rs (new file, 91 lines)

use crate::internals::utils::token_path;

use poise::serenity_prelude::prelude::TypeMapKey;
use bb8_redis::{
  bb8::Pool,
  redis::cmd,
  redis::RedisError,
  redis::RedisResult,
  redis::AsyncCommands,
  RedisConnectionManager
};
use tokio::time::{
  sleep,
  Duration
};

#[derive(Debug)]
pub struct RedisController {
  pool: Pool<RedisConnectionManager>
}

impl TypeMapKey for RedisController {
  type Value = RedisController;
}

impl RedisController {
  pub async fn new() -> Result<Self, RedisError> {
    let manager = RedisConnectionManager::new(token_path().await.redis_uri.as_str())?;
    let pool = Self::create_pool(manager).await;

    Ok(Self { pool })
  }

  async fn create_pool(manager: RedisConnectionManager) -> Pool<RedisConnectionManager> {
    let mut backoff = 1;

    loop {
      match Pool::builder().max_size(20).retry_connection(true).build(manager.clone()).await {
        Ok(pool) => {
          match pool.get().await {
            Ok(mut conn) => {
              let ping: RedisResult<String> = cmd("PING").query_async(&mut *conn).await;
              match ping {
                Ok(_) => {
                  println!("Redis[Info]: Successfully connected");
                  return pool.clone();
                },
                Err(e) => {
                  eprintln!("Redis[Error]: {}, retrying in {} seconds", e, backoff);
                  Self::apply_backoff(&mut backoff).await;
                }
              }
            },
            Err(e) => {
              eprintln!("Redis[ConnError]: {}, retrying in {} seconds", e, backoff);
              Self::apply_backoff(&mut backoff).await;
            }
          }
        }
        Err(e) => {
          eprintln!("Redis[PoolError]: {}, retrying in {} seconds", e, backoff);
          Self::apply_backoff(&mut backoff).await;
        }
      }
    }
  }

  async fn apply_backoff(backoff: &mut u64) {
    sleep(Duration::from_secs(*backoff)).await;
    if *backoff < 64 {
      *backoff *= 2;
    }
  }

  /// Get a key from the cache
  pub async fn get(&self, key: &str) -> RedisResult<Option<String>> {
    let mut conn = self.pool.get().await.unwrap();
    conn.get(key).await
  }

  /// Set a key with a value in the cache
  pub async fn set(&self, key: &str, value: &str) -> RedisResult<()> {
    let mut conn = self.pool.get().await.unwrap();
    conn.set(key, value).await
  }

  /// Set a key with an expiration time in seconds
  pub async fn expire(&self, key: &str, seconds: i64) -> RedisResult<()> {
    let mut conn = self.pool.get().await.unwrap();
    conn.expire(key, seconds).await
  }
}
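
For context, a hypothetical call site for the controller above, from elsewhere in the same crate (new() loops with 1s to 64s backoff until a PING succeeds, so it only returns once Redis is reachable):

use crate::controllers::cache::RedisController;

async fn demo() -> Result<(), Box<dyn std::error::Error>> {
  let redis = RedisController::new().await?;

  redis.set("RSS_ESXi", "Update 3o").await?;   // SET key value
  redis.expire("RSS_ESXi", 7200).await?;       // TTL in seconds
  let cached = redis.get("RSS_ESXi").await?;   // Option<String>, None once the key expires

  println!("{:?}", cached);
  Ok(())
}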

src/controllers/database.rs

@@ -1,6 +1,6 @@
 use crate::internals::utils::token_path;

-use once_cell::sync::Lazy;
+use std::sync::LazyLock;
 use bb8::{Pool, PooledConnection};
 use bb8_postgres::PostgresConnectionManager;
 use tokio_postgres::{

@@ -19,7 +19,7 @@ use std::{
   sync::Mutex
 };

-pub static DATABASE: Lazy<Mutex<Option<DatabaseController>>> = Lazy::new(|| Mutex::new(None));
+pub static DATABASE: LazyLock<Mutex<Option<DatabaseController>>> = LazyLock::new(|| Mutex::new(None));

 pub struct DatabaseController {
   pub pool: Pool<PostgresConnectionManager<NoTls>>

src/internals/mod.rs

@@ -1,4 +1,5 @@
 pub mod config;
 pub mod http;
+pub mod tasks;
 pub mod tsclient;
 pub mod utils;

src/internals/assets/FluidR3_GM.sf2 (new file, stored with Git LFS; binary file not shown)

src/internals/config.rs

@@ -1,31 +1,34 @@
-use once_cell::sync::Lazy;
+use std::sync::LazyLock;

 pub struct ConfigMeta {
-  pub guild_id: u64,
+  // pub guild_id: u64,
   pub embed_color: i32,
   pub ready_notify: u64,
+  pub rss_channel: u64,
   pub deploy_commands: bool,
   pub developers: Vec<u64>
 }

 #[cfg(feature = "production")]
-pub static BINARY_PROPERTIES: Lazy<ConfigMeta> = Lazy::new(|| ConfigMeta::new());
+pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(|| ConfigMeta::new());

 #[cfg(not(feature = "production"))]
-pub static BINARY_PROPERTIES: Lazy<ConfigMeta> = Lazy::new(||
+pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(||
   ConfigMeta::new()
-    .guild_id(865673694184996885)
+    // .guild_id(865673694184996885)
     .embed_color(0xf1d63c)
     .ready_notify(865673694184996888)
+    .rss_channel(865673694184996888)
     .deploy_commands(false)
 );

 impl ConfigMeta {
   fn new() -> Self {
     Self {
-      guild_id: 865673694184996885,
+      // guild_id: 865673694184996885,
       embed_color: 0x5a99c7,
       ready_notify: 865673694184996888,
+      rss_channel: 865673694184996888,
       deploy_commands: false,
       developers: vec![
         190407856527376384 // toast.ts

@@ -34,11 +37,11 @@ impl ConfigMeta {
   }

   // Scalable functions below;
-  #[cfg(not(feature = "production"))]
+  /* #[cfg(not(feature = "production"))]
   fn guild_id(mut self, guild_id: u64) -> Self {
     self.guild_id = guild_id;
     self
-  }
+  } */

   #[cfg(not(feature = "production"))]
   fn embed_color(mut self, color: i32) -> Self {

@@ -52,6 +55,12 @@ impl ConfigMeta {
     self
   }

+  #[cfg(not(feature = "production"))]
+  fn rss_channel(mut self, channel_id: u64) -> Self {
+    self.rss_channel = channel_id;
+    self
+  }
+
   #[cfg(not(feature = "production"))]
   fn deploy_commands(mut self, deploy: bool) -> Self {
     self.deploy_commands = deploy;

src/internals/tasks.rs (new file, 9 lines)

pub mod rss;

fn task_info(name: &str, message: &str) {
  println!("{}", format!("TaskScheduler[{}]: {}", name, message));
}

fn task_err(name: &str, message: &str) {
  eprintln!("{}", format!("TaskScheduler[{}:Error]: {}", name, message));
}
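
These helpers give every scheduled task a uniform log prefix; for example (assuming the functions above are in scope):

fn main() {
  task_info("RSS", "Task loaded!");          // stdout: TaskScheduler[RSS]: Task loaded!
  task_err("RSS", "[RedisExpiry]: timeout"); // stderr: TaskScheduler[RSS:Error]: [RedisExpiry]: timeout
}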

src/internals/tasks/rss.rs (new file, 305 lines)

use crate::{
  Error,
  controllers::cache::RedisController
};
use super::{
  super::{
    http::HttpClient,
    config::BINARY_PROPERTIES
  },
  task_info,
  task_err
};

use once_cell::sync::OnceCell;
use feed_rs::parser::parse;
use reqwest::Response;
use regex::Regex;
use std::{
  sync::Arc,
  io::Cursor
};
use poise::serenity_prelude::{
  Context,
  ChannelId,
  CreateMessage,
  CreateEmbed,
  CreateEmbedAuthor,
  Timestamp
};
use tokio::time::{
  Duration,
  interval
};

static REDIS_EXPIRY_SECS: i64 = 7200;
static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();

async fn redis_() {
  let redis = RedisController::new().await.unwrap();
  REDIS_SERVICE.set(Arc::new(redis)).unwrap();
}

async fn get_redis() -> Arc<RedisController> {
  if REDIS_SERVICE.get().is_none() {
    redis_().await;
  }
  REDIS_SERVICE.get().unwrap().clone()
}

fn format_href_to_discord(input: &str) -> String {
  let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
  re.replace_all(input, r"[$2]($1)").to_string()
}

fn format_html_to_discord(input: String) -> String {
  let mut output = input;

  // Replace all instances of <p> with newlines
  output = Regex::new(r#"<\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
  output = Regex::new(r#"<\s*/\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();

  // Replace all instances of <br> and <br /> with newlines
  output = Regex::new(r#"<\s*br\s*>"#).unwrap().replace_all(&output, "\n").to_string();
  output = Regex::new(r#"<\s*br\s*/\s*>"#).unwrap().replace_all(&output, "\n").to_string();

  // Replace all instances of <strong> with **
  output = Regex::new(r#"<\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
  output = Regex::new(r#"<\s*/\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();

  // Replace all instances of <var> and <small> with nothing
  output = Regex::new(r#"<\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
  output = Regex::new(r#"<\s*/\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
  output = Regex::new(r#"<\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();
  output = Regex::new(r#"<\s*/\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();

  // Remove any other HTML tags
  output = Regex::new(r#"<[^>]+>"#).unwrap().replace_all(&output, "").to_string();

  // Replace all instances of <a href="url">text</a> with [text](url)
  output = format_href_to_discord(&output);

  output
}

async fn fetch_feed(url: &str) -> Result<Response, Error> {
  let http = HttpClient::new();
  let res = match http.get(url, "RSS-Monitor").await {
    Ok(res) => res,
    Err(y) => return Err(y.into())
  };

  Ok(res)
}

async fn save_to_redis(key: &str, value: &str) -> Result<(), Error> {
  let redis = get_redis().await;
  redis.set(key, value).await.unwrap();
  if let Err(y) = redis.expire(key, REDIS_EXPIRY_SECS).await {
    task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
  }
  Ok(())
}

async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_ESXi";
  let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  let feed = parse(cursor).unwrap();
  let home_page = feed.links[0].clone().href;
  let article = feed.entries[0].clone();

  fn get_patch_version(input: &str) -> Option<String> {
    let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
    if let Some(caps) = re.captures(input) {
      let update_num = caps[1].to_string();
      let letter = caps.get(2).map_or("", |m| m.as_str());
      Some(format!("Update {}{}", update_num, letter))
    } else {
      None
    }
  }

  // This is for building up the embed with the feed data
  // std::fs::File::create("esxi_atom.log").unwrap();
  // std::fs::write("esxi_atom.log", format!("{:#?}", feed))?;

  let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();

  if cached_patch.is_empty() {
    redis.set(&rkey, &article.categories[3].term).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(patch) = get_patch_version(&article.categories[3].term) {
    if patch == cached_patch {
      return Ok(None);
    } else {
      save_to_redis(&rkey, &article.categories[3].term).await?;
      Ok(Some(CreateEmbed::new()
        .color(0x4EFBCB)
        .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
        .thumbnail(feed.logo.unwrap().uri)
        .description(format!(
          "{} {} for {} {} has been rolled out!\n{}",
          article.categories[2].term,
          article.categories[3].term,
          article.categories[0].term,
          article.categories[1].term,
          format_href_to_discord(article.summary.unwrap().content.as_str())
        ))
        .timestamp(Timestamp::from(article.updated.unwrap())))
      )
    }
  } else {
    task_err("RSS:ESXi", &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term.as_str()));
    Ok(None)
  }
}

async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_GPortal";
  let url = "https://status.g-portal.com/history.atom";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  let feed = parse(cursor).unwrap();
  let incident_page = feed.links[0].clone().href;
  let article = feed.entries[0].clone();

  fn get_incident_id(input: &str) -> Option<String> {
    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
    if let Some(caps) = re.captures(input) {
      Some(caps[1].to_string())
    } else {
      None
    }
  }

  // This is for building up the embed with the feed data
  // std::fs::File::create("gportal.log").unwrap();
  // std::fs::write("gportal.log", format!("{:#?}", feed))?;

  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();

  if cached_incident.is_empty() {
    redis.set(&rkey, get_incident_id(&article.links[0].href).unwrap().as_str()).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(incident) = get_incident_id(&article.links[0].href) {
    if incident == cached_incident {
      return Ok(None);
    } else {
      save_to_redis(&rkey, &incident).await?;
      Ok(Some(CreateEmbed::new()
        .color(0xC23EE8)
        .title(article.title.unwrap().content)
        .url(incident_page)
        .description(format!("{}", format_html_to_discord(article.content.unwrap().body.unwrap())))
        .timestamp(Timestamp::from(article.updated.unwrap())))
      )
    }
  } else {
    task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
    Ok(None)
  }
}

async fn rust_message() -> Result<Option<String>, Error> {
  let redis = get_redis().await;
  let rkey = "RSS_RustBlog";
  let url = "https://blog.rust-lang.org/feed.xml";

  let res = fetch_feed(url).await?;
  let data = res.text().await?;
  let cursor = Cursor::new(data);

  let feed = parse(cursor).unwrap();
  let article = feed.entries[0].clone();
  let article_id = article.id.clone();

  fn get_blog_title(input: String) -> Option<String> {
    let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
    re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
  }

  // This is for building up the message with the feed data
  // std::fs::File::create("rustblog.log").unwrap();
  // std::fs::write("rustblog.log", format!("{:#?}", feed))?;

  let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();

  if cached_blog.is_empty() {
    redis.set(&rkey, get_blog_title(article.id).unwrap().as_str()).await.unwrap();
    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
    }
    return Ok(None);
  }

  if let Some(blog) = get_blog_title(article.id) {
    if blog == cached_blog {
      return Ok(None);
    } else {
      save_to_redis(&rkey, &blog).await?;
      Ok(Some(format!("Rust Team has put out a new article!\n**[{}](<{}>)**", article.links[0].title.clone().unwrap(), article.links[0].href)))
    }
  } else {
    task_err("RSS:RustBlog", &format!("Article URL does not match the expected RegEx pattern! ({})", article_id));
    Ok(None)
  }
}

pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
  let task_name = "RSS";
  let mut interval = interval(Duration::from_secs(900));
  task_info(&task_name, "Task loaded!");

  loop {
    interval.tick().await;

    match esxi_embed().await {
      Ok(Some(embed)) => {
        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
      },
      Ok(None) => (),
      Err(y) => task_err(&task_name, &y.to_string())
    }

    match gportal_embed().await {
      Ok(Some(embed)) => {
        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new()
          .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
        ).await.unwrap();
      },
      Ok(None) => (),
      Err(y) => task_err(&task_name, &y.to_string())
    }

    match rust_message().await {
      Ok(Some(content)) => {
        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
      },
      Ok(None) => (),
      Err(y) => task_err(&task_name, &y.to_string())
    }
  }
}
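
The HTML-to-Markdown pass above leans on one capture-group regex for anchors and plain substitutions for the rest. A standalone check of the anchor rewrite, using the same pattern as format_href_to_discord:

use regex::Regex;

fn main() {
  let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
  let html = r#"See <a href="https://status.g-portal.com">the status page</a> for details."#;

  // $2 is the link text, $1 the URL: <a href="url">text</a> becomes [text](url)
  let out = re.replace_all(html, r"[$2]($1)");
  assert_eq!(out, "See [the status page](https://status.g-portal.com) for details.");
}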

src/internals/tsclient.rs

@@ -1,11 +1,11 @@
-use once_cell::sync::Lazy;
+use std::sync::LazyLock;
 use tokio::sync::RwLock;
 use tokenservice_client::{
   TokenService,
   TokenServiceApi
 };

-static TS_GLOBAL_CACHE: Lazy<RwLock<Option<TokenServiceApi>>> = Lazy::new(|| RwLock::new(None));
+static TS_GLOBAL_CACHE: LazyLock<RwLock<Option<TokenServiceApi>>> = LazyLock::new(|| RwLock::new(None));

 pub struct TSClient(TokenService);

src/internals/utils.rs

@@ -1,9 +1,10 @@
-use once_cell::sync::Lazy;
+use poise::serenity_prelude::UserId;
+use std::sync::LazyLock;
 use tokio::sync::Mutex;
 use tokenservice_client::TokenServiceApi;
 use super::tsclient::TSClient;

-pub static BOT_VERSION: Lazy<String> = Lazy::new(|| {
+pub static BOT_VERSION: LazyLock<String> = LazyLock::new(|| {
   let cargo_version = cargo_toml::Manifest::from_str(&include_str!("../../Cargo.toml"))
     .unwrap()
     .package

@@ -13,7 +14,7 @@ pub static BOT_VERSION: LazyLock<String> = LazyLock::new(|| {
   format!("v{}", cargo_version)
 });

-static TSCLIENT: Lazy<Mutex<TSClient>> = Lazy::new(|| Mutex::new(TSClient::new()));
+static TSCLIENT: LazyLock<Mutex<TSClient>> = LazyLock::new(|| Mutex::new(TSClient::new()));

 pub async fn token_path() -> TokenServiceApi {
   TSCLIENT.lock().await.get().await.unwrap()

@@ -23,6 +24,25 @@ pub fn concat_message(messages: Vec<String>) -> String {
   messages.join("\n")
 }

+pub fn mention_dev(ctx: poise::Context<'_, (), crate::Error>) -> Option<String> {
+  let devs = super::config::BINARY_PROPERTIES.developers.clone();
+  let app_owners = ctx.framework().options().owners.clone();
+
+  let mut mentions = Vec::new();
+
+  for dev in devs {
+    if app_owners.contains(&UserId::new(dev)) {
+      mentions.push(format!("<@{}>", dev));
+    }
+  }
+
+  if mentions.is_empty() {
+    None
+  } else {
+    Some(mentions.join(", "))
+  }
+}
+
 pub fn format_duration(secs: u64) -> String {
   let days = secs / 86400;
   let hours = (secs % 86400) / 3600;

@@ -43,3 +63,24 @@ pub fn format_duration(secs: u64) -> String {

   formatted_string
 }
+
+pub fn format_bytes(bytes: u64) -> String {
+  let units = ["B", "KB", "MB", "GB", "TB", "PB"];
+  let mut value = bytes as f64;
+  let mut unit = units[0];
+
+  for &u in &units[1..] {
+    if value < 1024.0 {
+      break;
+    }
+    value /= 1024.0;
+    unit = u;
+  }
+
+  if unit == "B" {
+    format!("{}{}", value, unit)
+  } else {
+    format!("{:.2}{}", value, unit)
+  }
+}
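
A quick check of format_bytes's behavior (values below 1 KB print the raw integer, everything else gets two decimals), assuming the function above is in scope:

fn main() {
  assert_eq!(format_bytes(512), "512B");               // loop exits before any division
  assert_eq!(format_bytes(1024), "1.00KB");            // one division, two decimals
  assert_eq!(format_bytes(8 * 1024 * 1024), "8.00MB"); // typical WAV-sized output
}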

src/main.rs

@@ -6,13 +6,19 @@ mod internals;
 use crate::{
   internals::{
-    utils::token_path,
+    utils::{
+      token_path,
+      mention_dev
+    },
     config::BINARY_PROPERTIES
   },
   // controllers::database::DatabaseController
 };

-use std::error;
+use std::{
+  thread::current,
+  sync::Arc
+};
 use poise::serenity_prelude::{
   builder::{
     CreateMessage,

@@ -25,11 +31,10 @@ use poise::serenity_prelude::{
   ClientBuilder,
   ChannelId,
   Command,
-  UserId,
   GatewayIntents
 };

-type Error = Box<dyn error::Error + Send + Sync>;
+type Error = Box<dyn std::error::Error + Send + Sync>;

 async fn on_ready(
   ctx: &Context,

@@ -75,14 +80,61 @@ async fn on_ready(
 }

 async fn event_processor(
-  _ctx: &Context,
+  ctx: &Context,
   event: &FullEvent,
-  _framework: poise::FrameworkContext<'_, (), Error>
+  framework: poise::FrameworkContext<'_, (), Error>
 ) -> Result<(), Error> {
   match event {
     FullEvent::Ratelimit { data } => {
       println!("Event[Ratelimit]: {:#?}", data);
     }
+    FullEvent::Message { new_message } => {
+      if new_message.author.bot || !new_message.guild_id.is_none() {
+        return Ok(());
+      }
+
+      if new_message.content.to_lowercase().starts_with("deploy") && new_message.author.id == BINARY_PROPERTIES.developers[0] {
+        let builder = poise::builtins::create_application_commands(&framework.options().commands);
+        let commands = Command::set_global_commands(&ctx.http, builder).await;
+        let mut commands_deployed = std::collections::HashSet::new();
+
+        match commands {
+          Ok(cmdmap) => for command in cmdmap.iter() {
+            commands_deployed.insert(command.name.clone());
+          },
+          Err(y) => {
+            eprintln!("Error registering commands: {:?}", y);
+            new_message.reply(&ctx.http, "Deployment failed, check console for more details!").await?;
+          }
+        }
+
+        if commands_deployed.len() > 0 {
+          new_message.reply(&ctx.http, format!(
+            "Deployed the commands globally:\n- {}",
+            commands_deployed.into_iter().collect::<Vec<_>>().join("\n- ")
+          )).await?;
+        }
+      }
+    }
+    FullEvent::Ready { .. } => {
+      let thread_id = format!("{:?}", current().id());
+      let thread_num: String = thread_id.chars().filter(|c| c.is_digit(10)).collect();
+      println!("Event[Ready]: Task Scheduler operating on thread {}", thread_num);
+
+      let ctx = Arc::new(ctx.clone());
+      tokio::spawn(async move {
+        match internals::tasks::rss::rss(ctx).await {
+          Ok(_) => {},
+          Err(y) => {
+            eprintln!("TaskScheduler[Main:RSS:Error]: Task execution failed: {}", y);
+            if let Some(source) = y.source() {
+              eprintln!("TaskScheduler[Main:RSS:Error]: Task execution failed caused by: {:#?}", source);
+            }
+          }
+        }
+      });
+    }
     _ => {}
   }

@@ -96,8 +148,10 @@ async fn main() {
   let framework = poise::Framework::builder()
     .options(poise::FrameworkOptions {
       commands: vec![
+        commands::ilo::ilo(),
         commands::ping::ping(),
         commands::status::status(),
+        commands::midi::midi_to_wav(),
         commands::uptime::uptime()
       ],
       pre_command: |ctx| Box::pin(async move {

@@ -112,12 +166,8 @@ async fn main() {
       poise::FrameworkError::Command { error, ctx, .. } => {
         println!("PoiseCommandError({}): {}", ctx.command().qualified_name, error);
         ctx.reply(format!(
-          "Encountered an error during command execution, ask **{}** to check console for more details!",
-          UserId::new(BINARY_PROPERTIES.developers[0])
-            .to_user(&ctx.http())
-            .await.expect("Error getting user")
-            .nick_in(&ctx.http(), BINARY_PROPERTIES.guild_id)
-            .await.expect("Error getting nickname")
+          "Encountered an error during command execution, ask {} to check console for more details!",
+          mention_dev(ctx).unwrap_or_default()
         )).await.expect("Error sending message");
       },
       poise::FrameworkError::EventHandler { error, event, .. } => println!("PoiseEventHandlerError({}): {}", event.snake_case_name(), error),

@@ -140,6 +190,8 @@ async fn main() {
   let mut client = ClientBuilder::new(
     token_path().await.main,
     GatewayIntents::GUILDS
+      | GatewayIntents::MESSAGE_CONTENT
+      | GatewayIntents::DIRECT_MESSAGES
   )
   .framework(framework)
   .await.expect("Error creating client");