Optimize iLO and remove unused stuff
parent 31f3d6854f, commit 235629be37
.gitattributes (vendored), 1 change
@@ -1 +0,0 @@
-libs/assets/FluidR3_GM.sf2 filter=lfs diff=lfs merge=lfs -text
Cargo.lock (generated), 456 changes
File diff suppressed because it is too large.
Cargo.toml, 14 changes
@@ -1,19 +1,17 @@
 [package]
 name = "kon"
-version = "0.6.4"
+version = "0.6.5"
 edition = "2024"

 [workspace]
-members = ["cmds", "libs", "repo", "tasks", "tokens"]
+members = ["cmds", "libs", "repo", "tokens"]

 [workspace.dependencies]
 bb8 = "0.9.0"
-bb8-redis = "0.18.0"
+bb8-redis = "0.20.0"
 cargo_toml = "0.21.0"
-feed-rs = "2.3.1"
-once_cell = "1.20.2"
+dashmap = "6.1.0"
 poise = "0.6.1"
-regex = "1.11.1"
 reqwest = { version = "0.12.12", features = ["json", "native-tls-vendored"] }
 serde = "1.0.217"
 serde_json = "1.0.134"
@@ -28,7 +26,6 @@ kon_repo = { path = "repo" }
 [dependencies]
 kon_cmds = { path = "cmds" }
 kon_libs = { workspace = true }
-kon_tasks = { path = "tasks" }
 kon_tokens = { workspace = true }
 poise = { workspace = true }
 tokio = { workspace = true }
@@ -37,8 +34,7 @@ tokio = { workspace = true }
 poise = { git = "https://github.com/serenity-rs/poise", branch = "next" }

 [features]
-production = ["kon_libs/production", "kon_tasks/production"]
-rss = ["kon_tasks/rss"]
+production = ["kon_libs/production"]

 [[bin]]
 name = "kon"
Dockerfile (name inferred from content)
@@ -2,9 +2,9 @@ FROM scratch AS base
 WORKDIR /builder
 COPY . .

-FROM alpine:3.21
+FROM adelielinux/adelie:1.0-beta6
 LABEL org.opencontainers.image.source="https://git.toast-server.net/toast/Kon"
-RUN apk add --no-cache libgcc fluidsynth
+RUN apk add --no-cache libgcc
 WORKDIR /kon
 COPY --from=base /builder/target/x86_64-unknown-linux-musl/release/kon .
 CMD [ "./kon" ]
cmds/Cargo.toml (path inferred from package name "kon_cmds")
@@ -1,6 +1,6 @@
 [package]
 name = "kon_cmds"
-version = "0.1.2"
+version = "0.1.3"
 edition = "2024"

 [dependencies]
@@ -8,10 +8,10 @@ kon_libs = { workspace = true }
 kon_tokens = { workspace = true }
 lazy_static = { workspace = true }
 poise = { workspace = true }
-regex = { workspace = true }
 reqwest = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 sysinfo = { workspace = true }
 tokio = { workspace = true }
 uptime_lib = { workspace = true }
+dashmap = { workspace = true }
kon_cmds: command module root (declares the command submodules; filename not shown in the capture)
@@ -1,5 +1,4 @@
 mod ilo;
-mod midi;
 mod status;
 mod uptime;

@@ -12,7 +11,6 @@ use kon_libs::{

 use {
   ilo::ilo,
-  midi::midi_to_wav,
   status::status,
   uptime::uptime
 };
@@ -23,7 +21,7 @@ macro_rules! commands {
   }
 }

-pub fn register_cmds() -> Vec<poise::Command<KonData, KonError>> { commands!(deploy, ping, ilo, midi_to_wav, status, uptime) }
+pub fn register_cmds() -> Vec<poise::Command<KonData, KonError>> { commands!(deploy, ping, ilo, status, uptime) }

 /// Deploy the commands globally or in a guild
 #[poise::command(prefix_command, owners_only, guild_only)]
kon_cmds: ilo.rs (iLO command; exact path not shown in the capture)
@@ -1,4 +1,5 @@
 use {
+  dashmap::DashMap,
   kon_libs::{
     BINARY_PROPERTIES,
     KonResult
@@ -21,114 +22,117 @@ use {
     Deserialize,
     Serialize,
     de::DeserializeOwned
-  }
+  },
+  tokio::time::Duration
 };

 const ILO_HOSTNAME: &str = "POMNI";

 lazy_static! {
-  static ref REQWEST_CLIENT: Client = ClientBuilder::new().danger_accept_invalid_certs(true).build().unwrap();
+  static ref REQWEST_CLIENT: Client = ClientBuilder::new()
+    .danger_accept_invalid_certs(true)
+    .timeout(Duration::from_secs(15))
+    .pool_max_idle_per_host(6)
+    .pool_idle_timeout(Some(Duration::from_secs(30)))
+    .tcp_keepalive(Duration::from_secs(600))
+    .build()
+    .unwrap();
+  static ref SENSOR_NAMES: DashMap<&'static str, &'static str> = {
+    let m = DashMap::new();
+    m.insert("01-Inlet Ambient", "Inlet Ambient");
+    m.insert("04-P1 DIMM 1-6", "P1 DIMM 1-6");
+    m.insert("14-Chipset Zone", "Chipset Zone");
+    m
+  };
+  static ref POST_STATES: DashMap<&'static str, &'static str> = {
+    let m = DashMap::new();
+    m.insert("FinishedPost", "Finished POST");
+    m.insert("InPost", "In POST (Booting)");
+    m.insert("PowerOff", "Powered off");
+    m
+  };
 }
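A side note on the client hunk above: every `ClientBuilder::build()` creates a client with its own connection pool, so the keep-alive and pool settings only pay off because the client lives in a `lazy_static` and is reused. A minimal sketch of the usage pattern this enables (hypothetical endpoints; reqwest 0.12 as pinned in Cargo.toml):

```rust
// Sketch: one shared reqwest::Client serving several requests.
// Client is internally reference-counted, so callers can clone it cheaply;
// requests to the same host reuse pooled TLS connections, which is what
// pool_max_idle_per_host / pool_idle_timeout / tcp_keepalive tune above.
async fn poll_ilo(client: &reqwest::Client) -> reqwest::Result<()> {
  // Hypothetical endpoints, for illustration only.
  client.get("https://ilo.local/redfish/v1/Chassis/1/Thermal").send().await?;
  client.get("https://ilo.local/redfish/v1/Systems/1").send().await?;
  Ok(())
}
```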
 #[derive(Serialize, Deserialize)]
+#[serde(rename_all = "PascalCase")]
 struct Chassis {
-  #[serde(rename = "Fans")]
   fans: Vec<Fan>,
-  #[serde(rename = "Temperatures")]
   temperatures: Vec<Temperature>
 }

 #[derive(Serialize, Deserialize)]
+#[serde(rename_all = "PascalCase")]
 struct Fan {
-  #[serde(rename = "CurrentReading")]
   current_reading: i32,
-  #[serde(rename = "FanName")]
   fan_name: String,
-  #[serde(rename = "Status")]
   status: Status
 }

 #[derive(Serialize, Deserialize)]
+#[serde(rename_all = "PascalCase")]
 struct Temperature {
-  #[serde(rename = "CurrentReading")]
   current_reading: i32,
-  #[serde(rename = "Name")]
   name: String,
-  #[serde(rename = "ReadingCelsius")]
   reading_celsius: i32,
-  #[serde(rename = "Status")]
   status: Status,
-  #[serde(rename = "Units")]
   units: String,
-  #[serde(rename = "UpperThresholdCritical")]
   upper_threshold_critical: i32,
-  #[serde(rename = "UpperThresholdFatal")]
   upper_threshold_fatal: i32
 }

 #[derive(Serialize, Deserialize)]
+#[serde(rename_all = "PascalCase")]
 struct Status {
-  #[serde(rename = "Health")]
   health: Option<String>,
-  #[serde(rename = "State")]
   state: String
 }

 #[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
 struct Power {
-  #[serde(rename = "PowerCapacityWatts")]
   power_capacity_watts: i32,
-  #[serde(rename = "PowerConsumedWatts")]
   power_consumed_watts: i32,
-  #[serde(rename = "PowerMetrics")]
   power_metrics: PowerMetrics
 }

 #[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
 struct PowerMetrics {
-  #[serde(rename = "AverageConsumedWatts")]
   average_consumed_watts: i32,
-  #[serde(rename = "MaxConsumedWatts")]
   max_consumed_watts: i32,
-  #[serde(rename = "MinConsumedWatts")]
   min_consumed_watts: i32
 }

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
 struct System {
-  #[serde(rename = "Memory")]
   memory: Memory,
-  #[serde(rename = "Model")]
   model: String,
-  #[serde(rename = "Oem")]
   oem: Oem,
-  #[serde(rename = "PowerState")]
   power_state: String,
-  #[serde(rename = "ProcessorSummary")]
   processor_summary: ProcessorSummary
 }

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
 struct Memory {
   #[serde(rename = "TotalSystemMemoryGB")]
-  total_system_memory: i32
+  total_system_memory_gb: i32
 }

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
 struct ProcessorSummary {
-  #[serde(rename = "Count")]
   count: i32,
-  #[serde(rename = "Model")]
-  cpu: String
+  model: String
 }

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
 struct Oem {
   #[serde(rename = "Hp")]
   hp: Hp
 }

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
 struct Hp {
   #[serde(rename = "PostState")]
   post_state: String
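For reference, the struct-level `#[serde(rename_all = "PascalCase")]` derives `CurrentReading` from `current_reading` mechanically, which is why the per-field `rename` attributes above could be dropped; only names that do not round-trip through snake_case, such as `TotalSystemMemoryGB` (PascalCase would yield `TotalSystemMemoryGb`), keep an explicit rename. A self-contained sketch of the behavior, using a hypothetical struct:

```rust
use serde::Deserialize;

// Hypothetical struct mirroring the pattern used above.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct FanReading {
  current_reading: i32, // deserializes from the JSON key "CurrentReading"
  fan_name: String      // deserializes from the JSON key "FanName"
}

fn demo() -> serde_json::Result<FanReading> {
  serde_json::from_str(r#"{ "CurrentReading": 42, "FanName": "Fan 1" }"#)
}
```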
@@ -148,12 +152,10 @@ struct Iml {
 }

 #[derive(Serialize, Deserialize)]
+#[serde(rename_all = "PascalCase")]
 struct ImlEntry {
-  #[serde(rename = "Created")]
   created: String,
-  #[serde(rename = "Message")]
   message: String,
-  #[serde(rename = "Severity")]
   severity: String
 }
@@ -251,12 +253,7 @@ async fn temperature(ctx: super::PoiseCtx<'_>) -> KonResult<()> {
       continue;
     }

-    let name = match temp.name.as_str() {
-      "01-Inlet Ambient" => "Inlet Ambient",
-      "04-P1 DIMM 1-6" => "P1 DIMM 1-6",
-      "14-Chipset Zone" => "Chipset Zone",
-      _ => "Unknown Sensor"
-    };
+    let name = SENSOR_NAMES.get(temp.name.as_str()).map(|s| *s).unwrap_or("Unknown sensor");

     tempdata.push_str(&format!("**{name}:** `{}°C`\n", temp.reading_celsius));
   }
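Worth noting on the lookup above: `DashMap::get` returns a `Ref` guard rather than a plain reference, so `.map(|s| *s)` copies the `&'static str` out and drops the shard lock immediately. A sketch under those assumptions (dashmap 6.x as pinned in the workspace):

```rust
use dashmap::DashMap;

// Copies the mapped &'static str out of the Ref guard so the shard lock is
// released before the caller formats output; unknown keys get a fallback.
fn sensor_label(names: &DashMap<&'static str, &'static str>, raw: &str) -> &'static str {
  names.get(raw).map(|s| *s).unwrap_or("Unknown sensor")
}
```

For tables that never change after startup, a plain `match` or a `lazy_static` `HashMap` would serve equally well; `DashMap` mainly buys cheap concurrent reads.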
@@ -285,13 +282,14 @@ async fn power(ctx: super::PoiseCtx<'_>) -> KonResult<()> {
   ctx.defer().await?;
   let data: Power = ilo_data(RedfishEndpoint::Power).await?;

-  let mut powerdata = String::new();
-  powerdata.push_str(&format!("**Power Capacity:** `{}w`\n", &data.power_capacity_watts));
-  powerdata.push_str(&format!("**Power Consumed:** `{}w`\n", &data.power_consumed_watts));
-  powerdata.push_str(&format!("**Average Power:** `{}w`\n", &data.power_metrics.average_consumed_watts));
-  powerdata.push_str(&format!("**Max Consumed:** `{}w`\n", &data.power_metrics.max_consumed_watts));
-  powerdata.push_str(&format!("**Min Consumed:** `{}w`", &data.power_metrics.min_consumed_watts));
+  let powerdata = format!(
+    "**Power Capacity:** `{}w`\n**Power Consumed:** `{}w`\n**Average Power:** `{}w`\n**Max Consumed:** `{}w`\n**Min Consumed:** `{}w`",
+    data.power_capacity_watts,
+    data.power_consumed_watts,
+    data.power_metrics.average_consumed_watts,
+    data.power_metrics.max_consumed_watts,
+    data.power_metrics.min_consumed_watts
+  );

   ctx
     .send(CreateReply::default().embed(embed_builder("Power", Some(powerdata), None)))
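The single `format!` above assembles the reply in one pass instead of five `push_str` calls. If the field list grew further, `write!` into a `String` is the other idiomatic option, sketched here with a hypothetical helper (not part of the commit):

```rust
use std::fmt::Write;

// Appends into one buffer; fmt::Write for String cannot fail, hence the unwrap.
fn render_power(capacity: i32, consumed: i32) -> String {
  let mut out = String::new();
  write!(out, "**Power Capacity:** `{capacity}w`\n**Power Consumed:** `{consumed}w`").unwrap();
  out
}
```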
@@ -312,12 +310,11 @@ async fn system(ctx: super::PoiseCtx<'_>) -> KonResult<()> {

   let mut data = String::new();

-  let post_state = match ilo_sys.oem.hp.post_state.as_str() {
-    "FinishedPost" => "Finished POST",
-    "InPost" => "In POST (Booting)",
-    "PowerOff" => "Powered off",
-    _ => "Unknown State"
-  };
+  let post_state = POST_STATES
+    .get(ilo_sys.oem.hp.post_state.as_str())
+    .map(|s| *s)
+    .unwrap_or("Unknown POST state");
+
   if ilo_sys.oem.hp.post_state != "FinishedPost" {
     println!("iLO:PostState = {}", ilo_sys.oem.hp.post_state);
   }
@@ -337,10 +334,10 @@ async fn system(ctx: super::PoiseCtx<'_>) -> KonResult<()> {
       Some(vec![
         (
           format!("CPU ({}x)", ilo_sys.processor_summary.count),
-          ilo_sys.processor_summary.cpu.trim().to_string(),
+          ilo_sys.processor_summary.model.trim().to_string(),
           true
         ),
-        ("RAM".to_string(), format!("{} GB", ilo_sys.memory.total_system_memory), true),
+        ("RAM".to_string(), format!("{} GB", ilo_sys.memory.total_system_memory_gb), true),
       ])
     )))
     .await?;
kon_cmds: midi.rs (MIDI-to-WAV command, deleted; exact path not shown in the capture)
@@ -1,113 +0,0 @@
-use {
-  kon_libs::{
-    KonError,
-    KonResult,
-    format_bytes,
-    mention_dev
-  },
-  poise::{
-    CreateReply,
-    serenity_prelude::{
-      CreateAttachment,
-      Message
-    }
-  },
-  regex::Regex,
-  std::{
-    fs::{
-      metadata,
-      remove_file,
-      write
-    },
-    os::unix::fs::MetadataExt
-  }
-};
-
-/// Convert MIDI file to WAV
-#[poise::command(
-  context_menu_command = "MIDI -> WAV",
-  install_context = "Guild|User",
-  interaction_context = "Guild|BotDm|PrivateChannel"
-)]
-pub async fn midi_to_wav(
-  ctx: super::PoiseCtx<'_>,
-  #[description = "MIDI file to be converted"] message: Message
-) -> KonResult<()> {
-  let re = Regex::new(r"(?i)\.mid$").unwrap();
-
-  if !message.embeds.is_empty() || message.attachments.is_empty() || !re.is_match(&message.attachments[0].filename) {
-    ctx.reply("That ain't a MIDI file! What are you even doing??").await?;
-    return Ok(());
-  }
-
-  ctx.defer().await?;
-
-  let bytes = match message.attachments[0].download().await {
-    Ok(bytes) => bytes,
-    Err(y) => {
-      ctx
-        .send(CreateReply::default().content(format!(
-          "Download failed, ask {} to check console for more information!",
-          mention_dev(ctx).unwrap_or_default()
-        )))
-        .await
-        .unwrap();
-
-      return Err(KonError::from(format!("Failed to download the file: {y}")))
-    }
-  };
-
-  let midi_path = &message.attachments[0].filename;
-  write(midi_path, bytes)?;
-
-  let wav_path = re.replace(midi_path, ".wav");
-
-  let sf2_path = "/tmp/FluidR3_GM.sf2";
-  write(sf2_path, include_bytes!("../../../libs/assets/FluidR3_GM.sf2"))?;
-
-  let output = std::process::Command::new("fluidsynth")
-    .args(["-ni", sf2_path, midi_path, "-F", &wav_path])
-    .output();
-
-  // Just to add an info to console to tell what the bot is doing when MIDI file is downloaded.
-  println!("Discord[{}]: Processing MIDI file: \"{midi_path}\"", ctx.command().qualified_name);
-
-  match output {
-    Ok(_) => {
-      let reply = ctx
-        .send(CreateReply::default().attachment(CreateAttachment::path(&*wav_path).await.unwrap()))
-        .await;
-
-      if reply.is_err() {
-        println!(
-          "Discord[{}]: Processed file couldn't be uploaded back to Discord channel due to upload limit",
-          ctx.command().qualified_name
-        );
-
-        ctx
-          .send(CreateReply::default().content(format!(
-            "Couldn't upload the processed file (`{}`, `{}`) due to upload limit",
-            &*wav_path,
-            format_bytes(metadata(&*wav_path).unwrap().size())
-          )))
-          .await?;
-      } else if reply.is_ok() {
-        println!(
-          "Discord[{}]: Processed file uploaded back to Discord channel",
-          ctx.command().qualified_name
-        );
-        remove_file(midi_path)?;
-        remove_file(&*wav_path)?;
-      }
-    },
-    Err(y) => {
-      ctx
-        .send(CreateReply::default().content("Command didn't execute successfully, check console for more information!"))
-        .await?;
-
-      return Err(KonError::from(format!("Midi conversion failed: {y}")))
-    }
-  }
-
-  Ok(())
-}
BIN libs/assets/FluidR3_GM.sf2 (stored with Git LFS; binary file not shown)
src/main.rs, 32 changes
@@ -1,18 +1,6 @@
 // https://cdn.toast-server.net/RustFSHiearchy.png
 // Using the new filesystem hierarchy

-#[cfg(feature = "rss")]
-use {
-  kon_tasks::{
-    rss,
-    run_task
-  },
-  std::{
-    sync::Arc,
-    thread::current
-  }
-};
-
 use {
   kon_cmds::register_cmds,
   kon_libs::{
@@ -22,7 +10,6 @@ use {
     GIT_COMMIT_HASH,
     KonData,
     KonResult,
-    PoiseFwCtx,
     mention_dev
   },
   kon_tokens::token_path,
@@ -30,7 +17,6 @@ use {
     ChannelId,
     ClientBuilder,
     Context,
-    FullEvent,
     GatewayIntents,
     Ready,
     builder::{
@@ -70,23 +56,6 @@ async fn on_ready(
   Ok(KonData {})
 }

-async fn event_processor(
-  framework: PoiseFwCtx<'_>,
-  event: &FullEvent
-) -> KonResult<()> {
-  #[cfg(feature = "rss")]
-  if let FullEvent::Ready { .. } = event {
-    let thread_id = format!("{:?}", current().id());
-    let thread_num: String = thread_id.chars().filter(|c| c.is_ascii_digit()).collect();
-    println!("Event[Ready]: Task Scheduler operating on thread {thread_num}");
-
-    let ctx = Arc::new(framework.serenity_context.clone());
-    run_task(ctx.clone(), rss).await;
-  }
-
-  Ok(())
-}
-
 #[tokio::main]
 async fn main() {
   let prefix = if BINARY_PROPERTIES.env.contains("dev") {
@@ -138,7 +107,6 @@ async fn main() {
       })
     },
     initialize_owners: true,
-    event_handler: |framework, event| Box::pin(event_processor(framework, event)),
     ..Default::default()
   })
   .setup(|ctx, ready, _| Box::pin(on_ready(ctx, ready)))
tasks/Cargo.toml (deleted)
@@ -1,18 +0,0 @@
-[package]
-name = "kon_tasks"
-version = "0.1.2"
-edition = "2024"
-
-[dependencies]
-feed-rs = { workspace = true }
-kon_libs = { workspace = true }
-kon_repo = { workspace = true }
-once_cell = { workspace = true }
-poise = { workspace = true }
-regex = { workspace = true }
-reqwest = { workspace = true }
-tokio = { workspace = true }
-
-[features]
-production = ["kon_libs/production"]
-rss = []
tasks/src/lib.rs (deleted; path inferred)
@@ -1,62 +0,0 @@
-#[cfg(feature = "rss")]
-mod rss;
-
-#[cfg(feature = "rss")]
-pub use rss::rss;
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::Context,
-  std::{
-    future::Future,
-    sync::{
-      Arc,
-      atomic::{
-        AtomicBool,
-        Ordering
-      }
-    }
-  },
-  tokio::task::spawn
-};
-
-#[cfg(feature = "rss")]
-fn task_info(
-  name: &str,
-  message: &str
-) {
-  println!("TaskScheduler[{name}]: {message}")
-}
-
-#[cfg(feature = "rss")]
-fn task_err(
-  name: &str,
-  message: &str
-) {
-  eprintln!("TaskScheduler[{name}:Error]: {message}")
-}
-
-static TASK_RUNNING: AtomicBool = AtomicBool::new(false);
-
-pub async fn run_task<F, T>(
-  ctx: Arc<Context>,
-  task: F
-) where
-  F: Fn(Arc<Context>) -> T + Send + 'static,
-  T: Future<Output = KonResult<()>> + Send + 'static
-{
-  let ctx_cl = Arc::clone(&ctx);
-
-  if !TASK_RUNNING.load(Ordering::SeqCst) {
-    TASK_RUNNING.store(true, Ordering::SeqCst);
-    spawn(async move {
-      if let Err(y) = task(ctx_cl).await {
-        eprintln!("TaskScheduler[Main:Error]: Failed to execute the task, error reason: {y}");
-        if let Some(source) = y.source() {
-          eprintln!("TaskScheduler[Main:Error]: Failed to execute the task, this is caused by: {source:#?}");
-        }
-      }
-      TASK_RUNNING.store(false, Ordering::SeqCst);
-    });
-  }
-}
tasks/src/rss.rs, 206 changes (deleted)
@@ -1,206 +0,0 @@
-mod processor; // Process the feeds and send it off to Discord
-
-mod esxi;
-mod github;
-mod gportal;
-mod rust;
-
-use {
-  esxi::Esxi,
-  github::GitHub,
-  gportal::GPortal,
-  rust::RustBlog
-};
-
-use super::{
-  task_err,
-  task_info
-};
-
-use {
-  feed_rs::parser::parse,
-  kon_libs::{
-    HttpClient,
-    KonResult
-  },
-  kon_repo::RedisController,
-  once_cell::sync::OnceCell,
-  poise::serenity_prelude::{
-    Context,
-    CreateEmbed,
-    Timestamp,
-    async_trait
-  },
-  regex::Regex,
-  reqwest::Response,
-  std::sync::Arc,
-  tokio::time::{
-    Duration,
-    interval
-  }
-};
-
-pub type RSSFeedBox = Box<dyn RSSFeed + Send + Sync>;
-
-const TASK_NAME: &str = "RSS";
-static REDIS_EXPIRY_SECS: i64 = 7200;
-static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();
-
-async fn redis_() {
-  let redis = RedisController::new().await.unwrap();
-  REDIS_SERVICE.set(Arc::new(redis)).unwrap();
-}
-
-async fn get_redis() -> Arc<RedisController> {
-  if REDIS_SERVICE.get().is_none() {
-    redis_().await;
-  }
-  REDIS_SERVICE.get().unwrap().clone()
-}
-
-fn format_href_to_discord(input: &str) -> String {
-  let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
-  re.replace_all(input, r"[$2]($1)").to_string()
-}
-
-fn format_html_to_discord(input: String) -> String {
-  let mut output = input;
-
-  // Replace all instances of <p> and </p> with newlines
-  output = Regex::new(r#"</?\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
-
-  // Replace all instances of <br> and <br /> with newlines
-  output = Regex::new(r#"<\s*br\s*/?\s*>"#).unwrap().replace_all(&output, "\n").to_string();
-
-  // Replace all instances of <strong> with **
-  output = Regex::new(r#"</?\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
-
-  // Replace all instances of <var> and <small> with nothing
-  output = Regex::new(r#"</?\s*(var|small)\s*>"#).unwrap().replace_all(&output, "").to_string();
-
-  // Remove any other HTML tags
-  output = Regex::new(r#"<[^>]+>"#).unwrap().replace_all(&output, "").to_string();
-
-  // Replace all instances of <a href="url">text</a> with [text](url)
-  output = format_href_to_discord(&output);
-
-  output
-}
-
-async fn fetch_feed(url: &str) -> KonResult<Response> {
-  let http = HttpClient::new();
-  let res = match http.get(url, "RSS-Monitor").await {
-    Ok(res) => res,
-    Err(y) => return Err(y.into())
-  };
-
-  Ok(res)
-}
-
-async fn save_to_redis(
-  key: &str,
-  value: &str
-) -> KonResult<()> {
-  let redis = get_redis().await;
-  redis.set(key, value).await.unwrap();
-  if let Err(y) = redis.expire(key, REDIS_EXPIRY_SECS).await {
-    task_err("RSS", format!("[RedisExpiry]: {y}").as_str());
-  }
-  Ok(())
-}
-
-fn embed(
-  color: u32,
-  title: String,
-  url: String,
-  description: String,
-  timestamp: Timestamp
-) -> CreateEmbed {
-  CreateEmbed::new()
-    .color(color)
-    .title(title)
-    .url(url)
-    .description(description)
-    .timestamp(timestamp)
-}
-
-const MAX_CONTENT_LENGTH: usize = 4000;
-fn trim_old_content(s: &str) -> String {
-  if s.len() > MAX_CONTENT_LENGTH {
-    s[..MAX_CONTENT_LENGTH].to_string()
-  } else {
-    s.to_string()
-  }
-}
-
-enum IncidentColorMap {
-  Update,
-  Investigating,
-  Monitoring,
-  Resolved,
-  Default
-}
-
-impl IncidentColorMap {
-  fn color(&self) -> u32 {
-    match self {
-      Self::Update => 0xABDD9E,        // Madang
-      Self::Investigating => 0xA5CCE0, // French Pass
-      Self::Monitoring => 0x81CBAD,    // Monte Carlo
-      Self::Resolved => 0x57F287,      // Emerald
-      Self::Default => 0x81CBAD        // Monte Carlo
-    }
-  }
-}
-
-#[async_trait]
-pub trait RSSFeed {
-  fn name(&self) -> &str;
-  fn url(&self) -> &str;
-  async fn process(
-    &self,
-    ctx: Arc<Context>
-  ) -> KonResult<Option<RSSFeedOutput>>;
-}
-
-/// Handle feed's output type for Discord message
-pub enum RSSFeedOutput {
-  RegularEmbed(CreateEmbed),
-  IncidentEmbed(CreateEmbed),
-  Content(String)
-}
-
-pub async fn rss(ctx: Arc<Context>) -> KonResult<()> {
-  #[cfg(feature = "production")]
-  let mut interval = interval(Duration::from_secs(300)); // Check feeds every 5 mins
-  #[cfg(not(feature = "production"))]
-  let mut interval = interval(Duration::from_secs(30)); // Check feeds every 30 secs
-  let mut first_run = true;
-  task_info(TASK_NAME, "Task loaded!");
-
-  let feeds: Vec<RSSFeedBox> = vec![
-    Box::new(Esxi::new("https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml".to_string())),
-    Box::new(GitHub::new("https://www.githubstatus.com/history.atom".to_string())),
-    Box::new(GPortal::new("https://status.g-portal.com/history.atom".to_string())),
-    Box::new(RustBlog::new("https://blog.rust-lang.org/feed.xml".to_string())),
-  ];
-
-  let mut processor = processor::RSSProcessor::new();
-
-  for feed in feeds {
-    processor.add_feed(feed);
-  }
-
-  loop {
-    interval.tick().await;
-
-    if first_run {
-      task_info(&format!("{TASK_NAME}:Processor"), "Starting up!");
-      first_run = false;
-    }
-
-    if let Err(e) = processor.process_all(ctx.clone()).await {
-      task_err(&format!("{TASK_NAME}:Processor"), &e.to_string());
-    }
-  }
-}
tasks/src/rss/esxi.rs (deleted; path inferred from `mod esxi;`)
@@ -1,115 +0,0 @@
-use super::{
-  RSSFeed,
-  RSSFeedOutput,
-  fetch_feed,
-  format_href_to_discord,
-  get_redis,
-  parse,
-  save_to_redis,
-  task_err
-};
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::{
-    Context,
-    CreateEmbed,
-    CreateEmbedAuthor,
-    Timestamp,
-    async_trait
-  },
-  regex::Regex,
-  std::{
-    io::Cursor,
-    sync::Arc
-  }
-};
-
-pub struct Esxi {
-  url: String
-}
-
-impl Esxi {
-  pub fn new(url: String) -> Self { Self { url } }
-}
-
-#[async_trait]
-impl RSSFeed for Esxi {
-  fn name(&self) -> &str { "ESXi" }
-
-  fn url(&self) -> &str { self.url.as_str() }
-
-  async fn process(
-    &self,
-    _ctx: Arc<Context>
-  ) -> KonResult<Option<RSSFeedOutput>> {
-    let redis = get_redis().await;
-    let rkey = "RSS_ESXi";
-
-    let res = fetch_feed(self.url()).await?;
-    let data = res.text().await?;
-    let cursor = Cursor::new(data);
-
-    let feed = parse(cursor).map_err(|e| {
-      task_err("RSS:ESXi", &format!("Error parsing RSS feed: {e}"));
-      e
-    })?;
-
-    if feed.entries.is_empty() {
-      task_err("RSS:ESXi", "No entries found in the feed!");
-      return Ok(None);
-    }
-
-    let home_page = feed.links[0].clone().href;
-    let article = feed.entries[0].clone();
-
-    fn get_patch_version(input: &str) -> Option<String> {
-      let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
-
-      if let Some(caps) = re.captures(input) {
-        let update_num = caps[1].to_string();
-        let letter = caps.get(2).map_or("", |m| m.as_str());
-        Some(format!("Update {update_num}{letter}"))
-      } else {
-        None
-      }
-    }
-
-    let cached_patch = redis.get(rkey).await.unwrap_or(None).unwrap_or_default();
-
-    if cached_patch.is_empty() {
-      save_to_redis(rkey, &article.categories[3].term).await?;
-      return Ok(None);
-    }
-
-    if let Some(patch) = get_patch_version(&article.categories[3].term) {
-      if patch == cached_patch {
-        Ok(None)
-      } else {
-        save_to_redis(rkey, &article.categories[3].term).await?;
-
-        Ok(Some(RSSFeedOutput::RegularEmbed(
-          CreateEmbed::new()
-            .color(0x4EFBCB)
-            .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
-            .thumbnail(feed.logo.unwrap().uri)
-            .description(format!(
-              "{} {} for {} {} has been rolled out!\n{}",
-              article.categories[2].term,
-              article.categories[3].term,
-              article.categories[0].term,
-              article.categories[1].term,
-              format_href_to_discord(&article.summary.unwrap().content)
-            ))
-            .timestamp(Timestamp::from(article.updated.unwrap()))
-        )))
-      }
-    } else {
-      task_err(
-        "RSS:ESXi",
-        &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term)
-      );
-      Ok(None)
-    }
-  }
-}
tasks/src/rss/github.rs (deleted; path inferred from `mod github;`)
@@ -1,144 +0,0 @@
-use super::{
-  IncidentColorMap,
-  RSSFeed,
-  RSSFeedOutput,
-  embed,
-  fetch_feed,
-  format_html_to_discord,
-  get_redis,
-  parse,
-  save_to_redis,
-  task_err,
-  task_info,
-  trim_old_content
-};
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::{
-    Context,
-    Timestamp,
-    async_trait
-  },
-  regex::Regex,
-  std::{
-    io::Cursor,
-    sync::Arc
-  }
-};
-
-pub struct GitHub {
-  url: String
-}
-
-impl GitHub {
-  pub fn new(url: String) -> Self { Self { url } }
-}
-
-#[async_trait]
-impl RSSFeed for GitHub {
-  fn name(&self) -> &str { "GitHub" }
-
-  fn url(&self) -> &str { self.url.as_str() }
-
-  async fn process(
-    &self,
-    _ctx: Arc<Context>
-  ) -> KonResult<Option<RSSFeedOutput>> {
-    let redis = get_redis().await;
-    let rkey = "RSS_GitHub";
-    let rkey_content = format!("{rkey}_Content");
-
-    let res = fetch_feed(self.url()).await?;
-    let data = res.text().await?;
-    let cursor = Cursor::new(data);
-
-    let feed = parse(cursor).map_err(|e| {
-      task_err("RSS:GitHub", &format!("Error parsing RSS feed: {e}"));
-      e
-    })?;
-
-    if feed.entries.is_empty() {
-      task_err("RSS:GitHub", "No entries found in the feed!");
-      return Ok(None);
-    }
-
-    let incident_page = feed.entries[0].links[0].clone().href;
-    let article = feed.entries[0].clone();
-
-    fn get_incident_id(input: &str) -> Option<String> {
-      let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
-      re.captures(input).map(|caps| caps[1].to_string())
-    }
-
-    let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default();
-    let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
-
-    let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
-    let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
-    let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
-    let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
-
-    let first_entry = date_patt
-      .split(&new_content)
-      .map(str::trim)
-      .find(|e| !e.is_empty())
-      .unwrap_or(&new_content);
-
-    let color: u32 = if update_patt.is_match(first_entry) {
-      IncidentColorMap::Update.color()
-    } else if investigating_patt.is_match(first_entry) {
-      IncidentColorMap::Investigating.color()
-    } else if resolved_patt.is_match(first_entry) {
-      IncidentColorMap::Resolved.color()
-    } else {
-      IncidentColorMap::Default.color()
-    };
-
-    task_info("RSS:GitHub:Debug", &format!("Checking cache for incident ID: {}", &article.links[0].href));
-    if cached_incident.is_empty() {
-      save_to_redis(rkey, &get_incident_id(&article.links[0].href).unwrap()).await?;
-      save_to_redis(&rkey_content, &new_content).await?;
-      return Ok(None);
-    }
-
-    if let Some(incident) = get_incident_id(&article.links[0].href) {
-      if incident == cached_incident {
-        let cached_content = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
-        if cached_content == new_content {
-          Ok(None)
-        } else {
-          redis.set(&rkey_content, &new_content).await.unwrap();
-          redis.expire(&rkey_content, 21600).await.unwrap();
-          task_info("RSS:GitHub:Debug", "Incident added in cache and preparing to send embed to Discord");
-
-          Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
-            color,
-            article.title.unwrap().content,
-            incident_page,
-            trim_old_content(&new_content),
-            Timestamp::from(article.updated.unwrap())
-          ))))
-        }
-      } else {
-        save_to_redis(rkey, &incident).await?;
-        redis.set(&rkey_content, &new_content).await.unwrap();
-        task_info("RSS:GitHub:Debug", "Incident updated in cache and preparing to send embed to Discord");
-
-        Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
-          color,
-          article.title.unwrap().content,
-          incident_page,
-          trim_old_content(&new_content),
-          Timestamp::from(article.updated.unwrap())
-        ))))
-      }
-    } else {
-      task_err(
-        "RSS:GitHub",
-        &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
-      );
-      Ok(None)
-    }
-  }
-}
tasks/src/rss/gportal.rs (deleted; path inferred from `mod gportal;`)
@@ -1,143 +0,0 @@
-use super::{
-  IncidentColorMap,
-  RSSFeed,
-  RSSFeedOutput,
-  embed,
-  fetch_feed,
-  format_html_to_discord,
-  get_redis,
-  parse,
-  save_to_redis,
-  task_err,
-  trim_old_content
-};
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::{
-    Context,
-    Timestamp,
-    async_trait
-  },
-  regex::Regex,
-  std::{
-    io::Cursor,
-    sync::Arc
-  }
-};
-
-pub struct GPortal {
-  url: String
-}
-
-impl GPortal {
-  pub fn new(url: String) -> Self { Self { url } }
-}
-
-#[async_trait]
-impl RSSFeed for GPortal {
-  fn name(&self) -> &str { "GPortal" }
-
-  fn url(&self) -> &str { self.url.as_str() }
-
-  async fn process(
-    &self,
-    _ctx: Arc<Context>
-  ) -> KonResult<Option<RSSFeedOutput>> {
-    let redis = get_redis().await;
-    let rkey = "RSS_GPortal";
-    let rkey_content = format!("{rkey}_Content");
-
-    let res = fetch_feed(self.url()).await?;
-    let data = res.text().await?;
-    let cursor = Cursor::new(data);
-
-    let feed = parse(cursor).map_err(|e| {
-      task_err("RSS:GPortal", &format!("Error parsing RSS feed: {e}"));
-      e
-    })?;
-
-    if feed.entries.is_empty() {
-      task_err("RSS:GPortal", "No entries found in the feed!");
-      return Ok(None);
-    }
-
-    let incident_page = feed.links[0].clone().href;
-    let article = feed.entries[0].clone();
-
-    fn get_incident_id(input: &str) -> Option<String> {
-      let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
-      re.captures(input).map(|caps| caps[1].to_string())
-    }
-
-    let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default();
-    let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
-
-    let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
-    let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
-    let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
-    let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
-    let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
-
-    let first_entry = date_patt
-      .split(&new_content)
-      .map(str::trim)
-      .find(|e| !e.is_empty())
-      .unwrap_or(&new_content);
-
-    let color: u32 = if update_patt.is_match(first_entry) {
-      IncidentColorMap::Update.color()
-    } else if investigating_patt.is_match(first_entry) {
-      IncidentColorMap::Investigating.color()
-    } else if monitoring_patt.is_match(first_entry) {
-      IncidentColorMap::Monitoring.color()
-    } else if resolved_patt.is_match(first_entry) {
-      IncidentColorMap::Resolved.color()
-    } else {
-      IncidentColorMap::Default.color()
-    };
-
-    if cached_incident.is_empty() {
-      save_to_redis(rkey, &get_incident_id(&article.links[0].href).unwrap()).await?;
-      save_to_redis(&rkey_content, &new_content).await?;
-      return Ok(None);
-    }
-
-    if let Some(incident) = get_incident_id(&article.links[0].href) {
-      if incident == cached_incident {
-        let cached_content = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
-        if cached_content == new_content {
-          Ok(None)
-        } else {
-          redis.set(&rkey_content, &new_content).await.unwrap();
-          redis.expire(&rkey_content, 21600).await.unwrap();
-
-          Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
-            color,
-            article.title.unwrap().content,
-            incident_page,
-            trim_old_content(&new_content),
-            Timestamp::from(article.updated.unwrap())
-          ))))
-        }
-      } else {
-        save_to_redis(rkey, &incident).await?;
-        redis.set(&rkey_content, &new_content).await.unwrap();
-
-        Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
-          color,
-          article.title.unwrap().content,
-          incident_page,
-          trim_old_content(&new_content),
-          Timestamp::from(article.updated.unwrap())
-        ))))
-      }
-    } else {
-      task_err(
-        "RSS:GPortal",
-        &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
-      );
-      Ok(None)
-    }
-  }
-}
tasks/src/rss/processor.rs (deleted; path inferred from `mod processor;`)
@@ -1,173 +0,0 @@
-use kon_libs::{
-  BINARY_PROPERTIES,
-  KonResult
-};
-
-use super::{
-  RSSFeedBox,
-  RSSFeedOutput,
-  TASK_NAME,
-  get_redis
-};
-
-use {
-  poise::serenity_prelude::{
-    ChannelId,
-    Context,
-    CreateEmbed,
-    CreateMessage,
-    EditMessage,
-    Http
-  },
-  regex::Regex,
-  std::sync::Arc
-};
-
-// This is for building up the embed with the feed data
-/* std::fs::File::create("rss_name.log").unwrap();
-std::fs::write("rss_name.log", format!("{:#?}", feed))?; */
-
-async fn process_regular_embed(
-  http: &Http,
-  embed: CreateEmbed,
-  redis_key: &str
-) -> KonResult<()> {
-  let redis = get_redis().await;
-  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-  let msg_id_key: Option<String> = redis.get(redis_key).await?;
-
-  if let Some(msg_id_key) = msg_id_key {
-    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-      if let Ok(mut message) = channel.message(http, msg_id).await {
-        message.edit(http, EditMessage::new().embed(embed)).await?;
-      }
-    }
-  } else {
-    let message = channel.send_message(http, CreateMessage::new().add_embed(embed)).await?;
-    redis.set(redis_key, &message.id.to_string()).await?;
-    redis.expire(redis_key, 36000).await?;
-  }
-
-  Ok(())
-}
-
-/// Cache-based embed updater for ongoing outages/incidents
-async fn process_incident_embed(
-  http: &Http,
-  embed: CreateEmbed,
-  redis_key: &str,
-  content_key: &str
-) -> KonResult<()> {
-  let redis = get_redis().await;
-  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-  let msg_id_key: Option<String> = redis.get(redis_key).await?;
-  let cached_content: Option<String> = redis.get(content_key).await.unwrap_or(None);
-
-  if let Some(msg_id_key) = msg_id_key {
-    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-      if let Ok(mut message) = channel.message(http, msg_id).await {
-        if let Some(existing) = message.embeds.first() {
-          let new_description = existing.description.clone().unwrap();
-
-          if cached_content.as_deref() != Some(&new_description) {
-            message.edit(http, EditMessage::new().embed(embed)).await?;
-          }
-
-          if Regex::new(r"(?i)^Resolved\s*-").unwrap().is_match(&new_description) {
-            redis.del(redis_key).await?;
-          }
-        }
-      }
-    }
-  } else {
-    let message = channel.send_message(http, CreateMessage::new().add_embed(embed)).await?;
-    redis.set(redis_key, &message.id.to_string()).await?;
-    redis.expire(redis_key, 36000).await?;
-  }
-
-  Ok(())
-}
-
-/// Process the content string
-async fn process_msg_content(
-  http: &Http,
-  content: String,
-  redis_key: &str
-) -> KonResult<()> {
-  let redis = get_redis().await;
-  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-  let msg_id_key: Option<String> = redis.get(redis_key).await?;
-
-  if let Some(msg_id_key) = msg_id_key {
-    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-      channel.edit_message(http, msg_id, EditMessage::new().content(content)).await?;
-    }
-  } else {
-    let message = channel.send_message(http, CreateMessage::new().content(content)).await?;
-    redis.set(redis_key, &message.id.to_string()).await?;
-    redis.expire(redis_key, 36000).await?;
-  }
-
-  Ok(())
-}
-
-pub struct RSSProcessor {
-  pub feeds: Vec<RSSFeedBox>
-}
-
-impl RSSProcessor {
-  pub fn new() -> Self { Self { feeds: Vec::new() } }
-
-  pub fn add_feed(
-    &mut self,
-    feed: RSSFeedBox
-  ) {
-    self.feeds.push(feed);
-  }
-
-  pub async fn process_all(
-    &self,
-    ctx: Arc<Context>
-  ) -> KonResult<()> {
-    let mut discord_msg: Vec<String> = Vec::new();
-
-    for feed in &self.feeds {
-      let feed_name = feed.name();
-      let redis_key = format!("RSS_{feed_name}_MsgId");
-      let error_msg = format!("**[{TASK_NAME}:{feed_name}:Error]:** Feed failed with the following error:```\n{{ error }}\n```");
-
-      match feed.process(ctx.clone()).await {
-        Ok(Some(output)) => match output {
-          RSSFeedOutput::RegularEmbed(embed) => {
-            if let Err(e) = process_regular_embed(&ctx.http, embed, &redis_key).await {
-              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
-            }
-          },
-          RSSFeedOutput::IncidentEmbed(embed) => {
-            if let Err(e) = process_incident_embed(&ctx.http, embed, &redis_key, &format!("RSS_{feed_name}_Content")).await {
-              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
-            }
-          },
-          RSSFeedOutput::Content(content) => {
-            if let Err(e) = process_msg_content(&ctx.http, content, &redis_key).await {
-              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
-            }
-          },
-        },
-        Ok(None) => (),
-        Err(e) => discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
-      }
-    }
-
-    if !discord_msg.is_empty() {
-      ChannelId::new(BINARY_PROPERTIES.kon_logs)
-        .send_message(&ctx.http, CreateMessage::new().content(discord_msg.join("\n")))
-        .await?;
-    }
-
-    Ok(())
-  }
-}
tasks/src/rss/rust.rs (deleted; path inferred from `mod rust;`)
@@ -1,94 +0,0 @@
-use super::{
-  RSSFeed,
-  RSSFeedOutput,
-  fetch_feed,
-  get_redis,
-  parse,
-  save_to_redis,
-  task_err
-};
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::{
-    Context,
-    async_trait
-  },
-  regex::Regex,
-  std::{
-    io::Cursor,
-    sync::Arc
-  }
-};
-
-pub struct RustBlog {
-  url: String
-}
-
-impl RustBlog {
-  pub fn new(url: String) -> Self { Self { url } }
-}
-
-#[async_trait]
-impl RSSFeed for RustBlog {
-  fn name(&self) -> &str { "RustBlog" }
-
-  fn url(&self) -> &str { self.url.as_str() }
-
-  async fn process(
-    &self,
-    _ctx: Arc<Context>
-  ) -> KonResult<Option<RSSFeedOutput>> {
-    let redis = get_redis().await;
-    let rkey = "RSS_RustBlog";
-
-    let res = fetch_feed(self.url()).await?;
-    let data = res.text().await?;
-    let cursor = Cursor::new(data);
-
-    let feed = parse(cursor).map_err(|e| {
-      task_err("RSS:RustBlog", &format!("Error parsing RSS feed: {e}"));
-      e
-    })?;
-
-    if feed.entries.is_empty() {
-      task_err("RSS:RustBlog", "No entries found in the feed!");
-      return Ok(None);
-    }
-
-    let article = feed.entries[0].clone();
-    let article_id = article.id.clone();
-
-    fn get_blog_title(input: String) -> Option<String> {
-      let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
-      re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
-    }
-
-    let cached_blog = redis.get(rkey).await.unwrap_or(None).unwrap_or_default();
-
-    if cached_blog.is_empty() {
-      save_to_redis(rkey, &get_blog_title(article.id).unwrap()).await?;
-      return Ok(None);
-    }
-
-    if let Some(blog_title) = get_blog_title(article.id) {
-      if blog_title == cached_blog {
-        Ok(None)
-      } else {
-        save_to_redis(rkey, &blog_title).await?;
-
-        Ok(Some(RSSFeedOutput::Content(format!(
-          "Rust Team has put out a new article!\n**[{}](<{}>)**",
-          article.links[0].title.clone().unwrap(),
-          article.links[0].href
-        ))))
-      }
-    } else {
-      task_err(
-        "RSS:RustBlog",
-        &format!("Article URL does not match the expected RegEx pattern! ({article_id})")
-      );
-      Ok(None)
-    }
-  }
-}
tokens/Cargo.toml (path inferred from the "tokens" workspace member)
@@ -4,5 +4,5 @@ version = "0.1.0"
 edition = "2024"

 [dependencies]
-tokenservice-client = { version = "0.4.1", registry = "gitea" }
+tokenservice-client = { version = "0.4.3", registry = "gitea" }
 tokio = { workspace = true }