Compare commits

...

2 Commits

SHA1        Message                               Date
0a7c569c48  Rework the RSS processing             2024-12-07 15:50:52 +11:00
            Some checks failed:
            Build and push Docker image / build (push): successful in 7m22s
            Build and push Docker image / deploy (push): failing after 1s
a7f67c1ec8  Add context types to other commands   2024-12-07 13:29:26 +11:00
11 changed files with 559 additions and 354 deletions

View File

@@ -9,11 +9,13 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
+env:
+  CARGO_TERM_COLOR: always
+  RUNNER_TOOL_CACHE: /toolcache
+
 jobs:
   build:
     runs-on: ubuntu-22.04
-    env:
-      RUNNER_TOOL_CACHE: /toolcache
     steps:
       - name: Set up Docker Buildx
@@ -42,7 +44,8 @@ jobs:
             $HOME/.cargo/registry/index/
             $HOME/.cargo/registry/cache/
             target/
-          key: ${{ runner.os }}-cache-${{ steps.cargo-cache-key.outputs.hash }}
+          key: ${{ runner.os }}-cargo-${{ steps.cargo-cache-key.outputs.hash }}
+          restore-keys: ${{ runner.os }}-cargo-
       - name: Login to Gitea
         uses: docker/login-action@v3
@@ -65,7 +68,6 @@ jobs:
         run: |
           rustup target add x86_64-unknown-linux-musl
           export GIT_COMMIT_HASH=${{ github.sha }} && \
-          export GIT_COMMIT_BRANCH=${{ github.ref_name }} && \
          cargo zigbuild --target x86_64-unknown-linux-musl --locked -rF production
       - name: Build and push image
@@ -84,7 +86,7 @@ jobs:
     needs: build
     steps:
       - name: Deploy update
-        uses: appleboy/ssh-action@v1.2.0
+        uses: appleboy/ssh-action@v1
         with:
           host: ${{ secrets.SSH_HOST }}
           username: ${{ secrets.SSH_USERNAME }}

View File

@@ -1,3 +1,8 @@
+mod ilo;
+mod midi;
+mod status;
+mod uptime;
+
 use kon_libs::{
   KonData,
   KonError,
@@ -5,13 +10,6 @@ use kon_libs::{
   PoiseCtx
 };
 
-use poise::Command;
-
-mod ilo;
-mod midi;
-mod status;
-mod uptime;
-
 use {
   ilo::ilo,
   midi::midi_to_wav,
@@ -25,7 +23,7 @@ macro_rules! commands {
   }
 }
 
-pub fn register_cmds() -> Vec<Command<KonData, KonError>> { commands!(deploy, ping, midi_to_wav, status, ilo, uptime) }
+pub fn register_cmds() -> Vec<poise::Command<KonData, KonError>> { commands!(deploy, ping, ilo, midi_to_wav, status, uptime) }
 
 /// Deploy the commands globally or in a guild
 #[poise::command(prefix_command, owners_only, guild_only)]
@@ -35,7 +33,7 @@ pub async fn deploy(ctx: PoiseCtx<'_>) -> KonResult<()> {
 }
 
 /// Check if the bot is alive
-#[poise::command(slash_command)]
+#[poise::command(slash_command, install_context = "Guild|User", interaction_context = "Guild|BotDm|PrivateChannel")]
 pub async fn ping(ctx: PoiseCtx<'_>) -> KonResult<()> {
   ctx.reply(format!("Powong! `{:.2?}`", ctx.ping().await)).await?;
   Ok(())

View File

@@ -7,7 +7,10 @@ use {
   },
   poise::{
     CreateReply,
-    serenity_prelude::CreateAttachment
+    serenity_prelude::{
+      CreateAttachment,
+      Message
+    }
   },
   regex::Regex,
   std::{
@@ -23,12 +26,12 @@ use {
 /// Convert MIDI file to WAV
 #[poise::command(
   context_menu_command = "MIDI -> WAV",
-  install_context = "User",
+  install_context = "Guild|User",
   interaction_context = "Guild|BotDm|PrivateChannel"
 )]
 pub async fn midi_to_wav(
   ctx: super::PoiseCtx<'_>,
-  #[description = "MIDI file to be converted"] message: poise::serenity_prelude::Message
+  #[description = "MIDI file to be converted"] message: Message
 ) -> KonResult<()> {
   let re = Regex::new(r"(?i)\.mid$").unwrap();
@@ -87,9 +90,12 @@ pub async fn midi_to_wav(
         &*wav_path,
         format_bytes(metadata(&*wav_path).unwrap().size())
       )))
-      .await
-      .unwrap();
+      .await?;
   } else if reply.is_ok() {
+    println!(
+      "Discord[{}]: Processed file uploaded back to Discord channel",
+      ctx.command().qualified_name
+    );
     remove_file(midi_path)?;
     remove_file(&*wav_path)?;
   }
@@ -97,8 +103,7 @@ pub async fn midi_to_wav(
     Err(y) => {
       ctx
         .send(CreateReply::default().content("Command didn't execute successfully, check console for more information!"))
-        .await
-        .unwrap();
+        .await?;
 
       return Err(KonError::from(format!("Midi conversion failed: {y}")))
     }

View File

@@ -73,7 +73,12 @@ fn process_pms_statuses(servers: Vec<(String, Vec<Value>)>) -> Vec<(String, Stri
 }
 
 /// Query the server statuses
-#[poise::command(slash_command, subcommands("wg"))]
+#[poise::command(
+  slash_command,
+  install_context = "Guild|User",
+  interaction_context = "Guild|BotDm|PrivateChannel",
+  subcommands("wg")
+)]
 pub async fn status(_: super::PoiseCtx<'_>) -> KonResult<()> { Ok(()) }
 
 /// Retrieve the server statuses from Wargaming

View File

@@ -46,7 +46,7 @@ fn get_os_info() -> String {
 }
 
 /// Retrieve host and bot uptimes
-#[poise::command(slash_command)]
+#[poise::command(slash_command, install_context = "Guild|User", interaction_context = "Guild|BotDm|PrivateChannel")]
 pub async fn uptime(ctx: super::PoiseCtx<'_>) -> KonResult<()> {
   let bot = ctx.http().get_current_user().await.unwrap();
   let mut sys = System::new_all();

View File

@@ -5,6 +5,13 @@ mod github;
 mod gportal;
 mod rust;
 
+use {
+  esxi::Esxi,
+  github::GitHub,
+  gportal::GPortal,
+  rust::RustBlog
+};
+
 use super::{
   task_err,
   task_info
@@ -21,7 +28,8 @@ use {
   poise::serenity_prelude::{
     Context,
     CreateEmbed,
-    Timestamp
+    Timestamp,
+    async_trait
   },
   regex::Regex,
   reqwest::Response,
@@ -32,6 +40,8 @@ use {
   }
 };
 
+pub type RSSFeedBox = Box<dyn RSSFeed + Send + Sync>;
+
 const TASK_NAME: &str = "RSS";
 static REDIS_EXPIRY_SECS: i64 = 7200;
 static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();
@@ -94,7 +104,7 @@ async fn save_to_redis(
   let redis = get_redis().await;
   redis.set(key, value).await.unwrap();
   if let Err(y) = redis.expire(key, REDIS_EXPIRY_SECS).await {
-    task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
+    task_err("RSS", format!("[RedisExpiry]: {y}").as_str());
   }
   Ok(())
 }
@@ -143,6 +153,23 @@ impl IncidentColorMap {
   }
 }
 
+#[async_trait]
+pub trait RSSFeed {
+  fn name(&self) -> &str;
+  fn url(&self) -> &str;
+  async fn process(
+    &self,
+    ctx: Arc<Context>
+  ) -> KonResult<Option<RSSFeedOutput>>;
+}
+
+/// Handle feed's output type for Discord message
+pub enum RSSFeedOutput {
+  RegularEmbed(CreateEmbed),
+  IncidentEmbed(CreateEmbed),
+  Content(String)
+}
+
 pub async fn rss(ctx: Arc<Context>) -> KonResult<()> {
   #[cfg(feature = "production")]
   let mut interval = interval(Duration::from_secs(300)); // Check feeds every 5 mins
@@ -151,6 +178,19 @@ pub async fn rss(ctx: Arc<Context>) -> KonResult<()> {
   let mut first_run = true;
   task_info(TASK_NAME, "Task loaded!");
 
+  let feeds: Vec<RSSFeedBox> = vec![
+    Box::new(Esxi::new("https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml".to_string())),
+    Box::new(GitHub::new("https://www.githubstatus.com/history.atom".to_string())),
+    Box::new(GPortal::new("https://status.g-portal.com/history.atom".to_string())),
+    Box::new(RustBlog::new("https://blog.rust-lang.org/feed.xml".to_string())),
+  ];
+
+  let mut processor = processor::RSSProcessor::new();
+
+  for feed in feeds {
+    processor.add_feed(feed);
+  }
+
   loop {
     interval.tick().await;
@@ -158,6 +198,9 @@ pub async fn rss(ctx: Arc<Context>) -> KonResult<()> {
       task_info(&format!("{TASK_NAME}:Processor"), "Starting up!");
       first_run = false;
     }
-    processor::feed_processor(&ctx).await;
+
+    if let Err(e) = processor.process_all(ctx.clone()).await {
+      task_err(&format!("{TASK_NAME}:Processor"), &e.to_string());
+    }
   }
 }
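For orientation, the new RSSFeed trait makes each source a pluggable unit that the processor drives uniformly. Below is a minimal sketch of what a further feed could look like under this trait; ExampleFeed and its output string are invented for illustration, and the sketch assumes the crate-internal items visible in this diff (RSSFeed, RSSFeedOutput, KonResult, fetch_feed, parse, task_err).

// Hypothetical feed source, for illustration only. It mirrors the shape of the
// implementations in this commit (esxi.rs, github.rs, gportal.rs, rust.rs).
use {
  kon_libs::KonResult,
  poise::serenity_prelude::{
    Context,
    async_trait
  },
  std::{
    io::Cursor,
    sync::Arc
  }
};

pub struct ExampleFeed {
  url: String
}

impl ExampleFeed {
  pub fn new(url: String) -> Self { Self { url } }
}

#[async_trait]
impl RSSFeed for ExampleFeed {
  fn name(&self) -> &str { "Example" }

  fn url(&self) -> &str { self.url.as_str() }

  async fn process(
    &self,
    _ctx: Arc<Context>
  ) -> KonResult<Option<RSSFeedOutput>> {
    // Fetch and parse the feed, then surface the newest entry as plain content.
    let res = fetch_feed(self.url()).await?;
    let data = res.text().await?;

    let feed = parse(Cursor::new(data)).map_err(|e| {
      task_err("RSS:Example", &format!("Error parsing RSS feed: {e}"));
      e
    })?;

    Ok(
      feed
        .entries
        .first()
        .map(|entry| RSSFeedOutput::Content(format!("New entry: {}", entry.id)))
    )
  }
}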

View File

@@ -1,88 +1,115 @@
-use super::{
-  super::task_err,
-  REDIS_EXPIRY_SECS,
-  fetch_feed,
-  format_href_to_discord,
-  get_redis,
-  parse,
-  save_to_redis
-};
-
-use {
-  kon_libs::KonResult,
-  poise::serenity_prelude::{
-    CreateEmbed,
-    CreateEmbedAuthor,
-    Timestamp
-  },
-  regex::Regex,
-  std::io::Cursor
-};
-
-pub async fn esxi_embed() -> KonResult<Option<CreateEmbed>> {
-  let redis = get_redis().await;
-  let rkey = "RSS_ESXi";
-  let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let home_page = feed.links[0].clone().href;
-  let article = feed.entries[0].clone();
-
-  fn get_patch_version(input: &str) -> Option<String> {
-    let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
-
-    if let Some(caps) = re.captures(input) {
-      let update_num = caps[1].to_string();
-      let letter = caps.get(2).map_or("", |m| m.as_str());
-      Some(format!("Update {}{}", update_num, letter))
-    } else {
-      None
-    }
-  }
-
-  let cached_patch = redis.get(rkey).await.unwrap().unwrap_or_default();
-
-  if cached_patch.is_empty() {
-    redis.set(rkey, &article.categories[3].term).await.unwrap();
-    if let Err(y) = redis.expire(rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(patch) = get_patch_version(&article.categories[3].term) {
-    if patch == cached_patch {
-      Ok(None)
-    } else {
-      save_to_redis(rkey, &article.categories[3].term).await?;
-      Ok(Some(
-        CreateEmbed::new()
-          .color(0x4EFBCB)
-          .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
-          .thumbnail(feed.logo.unwrap().uri)
-          .description(format!(
-            "{} {} for {} {} has been rolled out!\n{}",
-            article.categories[2].term,
-            article.categories[3].term,
-            article.categories[0].term,
-            article.categories[1].term,
-            format_href_to_discord(article.summary.unwrap().content.as_str())
-          ))
-          .timestamp(Timestamp::from(article.updated.unwrap()))
-      ))
-    }
-  } else {
-    task_err(
-      "RSS:ESXi",
-      &format!(
-        "Article term does not match the expected RegEx pattern! ({})",
-        article.categories[3].term.as_str()
-      )
-    );
-    Ok(None)
-  }
-}
+use super::{
+  RSSFeed,
+  RSSFeedOutput,
+  fetch_feed,
+  format_href_to_discord,
+  get_redis,
+  parse,
+  save_to_redis,
+  task_err
+};
+
+use {
+  kon_libs::KonResult,
+  poise::serenity_prelude::{
+    Context,
+    CreateEmbed,
+    CreateEmbedAuthor,
+    Timestamp,
+    async_trait
+  },
+  regex::Regex,
+  std::{
+    io::Cursor,
+    sync::Arc
+  }
+};
+
+pub struct Esxi {
+  url: String
+}
+
+impl Esxi {
+  pub fn new(url: String) -> Self { Self { url } }
+}
+
+#[async_trait]
+impl RSSFeed for Esxi {
+  fn name(&self) -> &str { "ESXi" }
+
+  fn url(&self) -> &str { self.url.as_str() }
+
+  async fn process(
+    &self,
+    _ctx: Arc<Context>
+  ) -> KonResult<Option<RSSFeedOutput>> {
+    let redis = get_redis().await;
+    let rkey = "RSS_ESXi";
+
+    let res = fetch_feed(self.url()).await?;
+    let data = res.text().await?;
+    let cursor = Cursor::new(data);
+
+    let feed = parse(cursor).map_err(|e| {
+      task_err("RSS:ESXi", &format!("Error parsing RSS feed: {e}"));
+      e
+    })?;
+
+    if feed.entries.is_empty() {
+      task_err("RSS:ESXi", "No entries found in the feed!");
+      return Ok(None);
+    }
+
+    let home_page = feed.links[0].clone().href;
+    let article = feed.entries[0].clone();
+
+    fn get_patch_version(input: &str) -> Option<String> {
+      let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
+
+      if let Some(caps) = re.captures(input) {
+        let update_num = caps[1].to_string();
+        let letter = caps.get(2).map_or("", |m| m.as_str());
+        Some(format!("Update {update_num}{letter}"))
+      } else {
+        None
+      }
+    }
+
+    let cached_patch = redis.get(rkey).await.unwrap_or(None).unwrap_or_default();
+
+    if cached_patch.is_empty() {
+      save_to_redis(rkey, &article.categories[3].term).await?;
+      return Ok(None);
+    }
+
+    if let Some(patch) = get_patch_version(&article.categories[3].term) {
+      if patch == cached_patch {
+        Ok(None)
+      } else {
+        save_to_redis(rkey, &article.categories[3].term).await?;
+
+        Ok(Some(RSSFeedOutput::RegularEmbed(
+          CreateEmbed::new()
+            .color(0x4EFBCB)
+            .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
+            .thumbnail(feed.logo.unwrap().uri)
+            .description(format!(
+              "{} {} for {} {} has been rolled out!\n{}",
+              article.categories[2].term,
+              article.categories[3].term,
+              article.categories[0].term,
+              article.categories[1].term,
+              format_href_to_discord(&article.summary.unwrap().content)
+            ))
+            .timestamp(Timestamp::from(article.updated.unwrap()))
+        )))
+      }
+    } else {
+      task_err(
+        "RSS:ESXi",
+        &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term)
+      );
+      Ok(None)
+    }
+  }
+}

View File

@ -1,111 +1,140 @@
use super::{ use super::{
super::task_err,
IncidentColorMap, IncidentColorMap,
REDIS_EXPIRY_SECS, RSSFeed,
RSSFeedOutput,
embed, embed,
fetch_feed, fetch_feed,
format_html_to_discord, format_html_to_discord,
get_redis, get_redis,
parse, parse,
save_to_redis, save_to_redis,
task_err,
trim_old_content trim_old_content
}; };
use { use {
kon_libs::KonResult, kon_libs::KonResult,
poise::serenity_prelude::{ poise::serenity_prelude::{
CreateEmbed, Context,
Timestamp Timestamp,
async_trait
}, },
regex::Regex, regex::Regex,
std::io::Cursor std::{
io::Cursor,
sync::Arc
}
}; };
pub async fn github_embed() -> KonResult<Option<CreateEmbed>> { pub struct GitHub {
let redis = get_redis().await; url: String
let rkey = "RSS_GitHub"; }
let rkey_content = format!("{}_Content", rkey);
let url = "https://www.githubstatus.com/history.atom";
let res = fetch_feed(url).await?; impl GitHub {
let data = res.text().await?; pub fn new(url: String) -> Self { Self { url } }
let cursor = Cursor::new(data); }
let feed = parse(cursor).unwrap(); #[async_trait]
let incident_page = feed.entries[0].links[0].clone().href; impl RSSFeed for GitHub {
let article = feed.entries[0].clone(); fn name(&self) -> &str { "GitHub" }
fn get_incident_id(input: &str) -> Option<String> { fn url(&self) -> &str { self.url.as_str() }
let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
re.captures(input).map(|caps| caps[1].to_string()) async fn process(
} &self,
_ctx: Arc<Context>
) -> KonResult<Option<RSSFeedOutput>> {
let redis = get_redis().await;
let rkey = "RSS_GitHub";
let rkey_content = format!("{rkey}_Content");
let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default(); let res = fetch_feed(self.url()).await?;
let new_content = format_html_to_discord(article.content.unwrap().body.unwrap()); let data = res.text().await?;
let cursor = Cursor::new(data);
let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap(); let feed = parse(cursor).map_err(|e| {
let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap(); task_err("RSS:GitHub", &format!("Error parsing RSS feed: {e}"));
let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap(); e
let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap(); })?;
let first_entry = date_patt if feed.entries.is_empty() {
.split(&new_content) task_err("RSS:GitHub", "No entries found in the feed!");
.map(str::trim) return Ok(None);
.find(|e| !e.is_empty())
.unwrap_or(&new_content);
let color: u32 = if update_patt.is_match(first_entry) {
IncidentColorMap::Update.color()
} else if investigating_patt.is_match(first_entry) {
IncidentColorMap::Investigating.color()
} else if resolved_patt.is_match(first_entry) {
IncidentColorMap::Resolved.color()
} else {
IncidentColorMap::Default.color()
};
if cached_incident.is_empty() {
redis.set(rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
redis.set(&rkey_content, &new_content).await.unwrap();
if let Err(y) = redis.expire(rkey, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
} }
return Ok(None);
}
if let Some(incident) = get_incident_id(&article.links[0].href) { let incident_page = feed.entries[0].links[0].clone().href;
if incident == cached_incident { let article = feed.entries[0].clone();
let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
if cached_content == new_content { fn get_incident_id(input: &str) -> Option<String> {
Ok(None) let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
re.captures(input).map(|caps| caps[1].to_string())
}
let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default();
let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
let first_entry = date_patt
.split(&new_content)
.map(str::trim)
.find(|e| !e.is_empty())
.unwrap_or(&new_content);
let color: u32 = if update_patt.is_match(first_entry) {
IncidentColorMap::Update.color()
} else if investigating_patt.is_match(first_entry) {
IncidentColorMap::Investigating.color()
} else if resolved_patt.is_match(first_entry) {
IncidentColorMap::Resolved.color()
} else {
IncidentColorMap::Default.color()
};
if cached_incident.is_empty() {
save_to_redis(rkey, &get_incident_id(&article.links[0].href).unwrap()).await?;
save_to_redis(&rkey_content, &new_content).await?;
return Ok(None);
}
if let Some(incident) = get_incident_id(&article.links[0].href) {
if incident == cached_incident {
let cached_content = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
if cached_content == new_content {
Ok(None)
} else {
redis.set(&rkey_content, &new_content).await.unwrap();
redis.expire(&rkey_content, 21600).await.unwrap();
Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
color,
article.title.unwrap().content,
incident_page,
trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap())
))))
}
} else { } else {
save_to_redis(rkey, &incident).await?;
redis.set(&rkey_content, &new_content).await.unwrap(); redis.set(&rkey_content, &new_content).await.unwrap();
redis.expire(&rkey_content, 21600).await.unwrap();
Ok(Some(embed( Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
color, color,
article.title.unwrap().content, article.title.unwrap().content,
incident_page, incident_page,
trim_old_content(&new_content), trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap()) Timestamp::from(article.updated.unwrap())
))) ))))
} }
} else { } else {
save_to_redis(rkey, &incident).await?; task_err(
redis.set(&rkey_content, &new_content).await.unwrap(); "RSS:GitHub",
Ok(Some(embed( &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
color, );
article.title.unwrap().content, Ok(None)
incident_page,
trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap())
)))
} }
} else {
task_err(
"RSS:GitHub",
&format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
);
Ok(None)
} }
} }

View File

@ -1,114 +1,143 @@
use super::{ use super::{
super::task_err,
IncidentColorMap, IncidentColorMap,
REDIS_EXPIRY_SECS, RSSFeed,
RSSFeedOutput,
embed, embed,
fetch_feed, fetch_feed,
format_html_to_discord, format_html_to_discord,
get_redis, get_redis,
parse, parse,
save_to_redis, save_to_redis,
task_err,
trim_old_content trim_old_content
}; };
use { use {
kon_libs::KonResult, kon_libs::KonResult,
poise::serenity_prelude::{ poise::serenity_prelude::{
CreateEmbed, Context,
Timestamp Timestamp,
async_trait
}, },
regex::Regex, regex::Regex,
std::io::Cursor std::{
io::Cursor,
sync::Arc
}
}; };
pub async fn gportal_embed() -> KonResult<Option<CreateEmbed>> { pub struct GPortal {
let redis = get_redis().await; url: String
let rkey = "RSS_GPortal"; }
let rkey_content = format!("{}_Content", rkey);
let url = "https://status.g-portal.com/history.atom";
let res = fetch_feed(url).await?; impl GPortal {
let data = res.text().await?; pub fn new(url: String) -> Self { Self { url } }
let cursor = Cursor::new(data); }
let feed = parse(cursor).unwrap(); #[async_trait]
let incident_page = feed.links[0].clone().href; impl RSSFeed for GPortal {
let article = feed.entries[0].clone(); fn name(&self) -> &str { "GPortal" }
fn get_incident_id(input: &str) -> Option<String> { fn url(&self) -> &str { self.url.as_str() }
let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
re.captures(input).map(|caps| caps[1].to_string()) async fn process(
} &self,
_ctx: Arc<Context>
) -> KonResult<Option<RSSFeedOutput>> {
let redis = get_redis().await;
let rkey = "RSS_GPortal";
let rkey_content = format!("{rkey}_Content");
let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default(); let res = fetch_feed(self.url()).await?;
let new_content = format_html_to_discord(article.content.unwrap().body.unwrap()); let data = res.text().await?;
let cursor = Cursor::new(data);
let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap(); let feed = parse(cursor).map_err(|e| {
let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap(); task_err("RSS:GPortal", &format!("Error parsing RSS feed: {e}"));
let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap(); e
let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap(); })?;
let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
let first_entry = date_patt if feed.entries.is_empty() {
.split(&new_content) task_err("RSS:GPortal", "No entries found in the feed!");
.map(str::trim) return Ok(None);
.find(|e| !e.is_empty())
.unwrap_or(&new_content);
let color: u32 = if update_patt.is_match(first_entry) {
IncidentColorMap::Update.color()
} else if investigating_patt.is_match(first_entry) {
IncidentColorMap::Investigating.color()
} else if monitoring_patt.is_match(first_entry) {
IncidentColorMap::Monitoring.color()
} else if resolved_patt.is_match(first_entry) {
IncidentColorMap::Resolved.color()
} else {
IncidentColorMap::Default.color()
};
if cached_incident.is_empty() {
redis.set(rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
redis.set(&rkey_content, &new_content).await.unwrap();
if let Err(y) = redis.expire(rkey, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
} }
return Ok(None);
}
if let Some(incident) = get_incident_id(&article.links[0].href) { let incident_page = feed.links[0].clone().href;
if incident == cached_incident { let article = feed.entries[0].clone();
let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
if cached_content == new_content { fn get_incident_id(input: &str) -> Option<String> {
Ok(None) let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
re.captures(input).map(|caps| caps[1].to_string())
}
let cached_incident = redis.get(rkey).await.unwrap().unwrap_or_default();
let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
let first_entry = date_patt
.split(&new_content)
.map(str::trim)
.find(|e| !e.is_empty())
.unwrap_or(&new_content);
let color: u32 = if update_patt.is_match(first_entry) {
IncidentColorMap::Update.color()
} else if investigating_patt.is_match(first_entry) {
IncidentColorMap::Investigating.color()
} else if monitoring_patt.is_match(first_entry) {
IncidentColorMap::Monitoring.color()
} else if resolved_patt.is_match(first_entry) {
IncidentColorMap::Resolved.color()
} else {
IncidentColorMap::Default.color()
};
if cached_incident.is_empty() {
save_to_redis(rkey, &get_incident_id(&article.links[0].href).unwrap()).await?;
save_to_redis(&rkey_content, &new_content).await?;
return Ok(None);
}
if let Some(incident) = get_incident_id(&article.links[0].href) {
if incident == cached_incident {
let cached_content = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
if cached_content == new_content {
Ok(None)
} else {
redis.set(&rkey_content, &new_content).await.unwrap();
redis.expire(&rkey_content, 21600).await.unwrap();
Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
color,
article.title.unwrap().content,
incident_page,
trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap())
))))
}
} else { } else {
save_to_redis(rkey, &incident).await?;
redis.set(&rkey_content, &new_content).await.unwrap(); redis.set(&rkey_content, &new_content).await.unwrap();
redis.expire(&rkey_content, 21600).await.unwrap();
Ok(Some(embed( Ok(Some(RSSFeedOutput::IncidentEmbed(embed(
color, color,
article.title.unwrap().content, article.title.unwrap().content,
incident_page, incident_page,
trim_old_content(&new_content), trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap()) Timestamp::from(article.updated.unwrap())
))) ))))
} }
} else { } else {
save_to_redis(rkey, &incident).await?; task_err(
redis.set(&rkey_content, &new_content).await.unwrap(); "RSS:GPortal",
Ok(Some(embed( &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
color, );
article.title.unwrap().content, Ok(None)
incident_page,
trim_old_content(&new_content),
Timestamp::from(article.updated.unwrap())
)))
} }
} else {
task_err(
"RSS:GPortal",
&format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href)
);
Ok(None)
} }
} }

View File

@@ -4,13 +4,10 @@ use kon_libs::{
 };
 
 use super::{
+  RSSFeedBox,
+  RSSFeedOutput,
   TASK_NAME,
-  esxi::esxi_embed,
-  get_redis,
-  github::github_embed,
-  gportal::gportal_embed,
-  rust::rust_message,
-  task_err
+  get_redis
 };
 
 use {
@@ -19,9 +16,11 @@ use {
     Context,
     CreateEmbed,
     CreateMessage,
-    EditMessage
+    EditMessage,
+    Http
   },
   regex::Regex,
+  std::sync::Arc,
   tokio::time::{
     Duration,
     sleep
@@ -32,29 +31,52 @@ use {
 /* std::fs::File::create("rss_name.log").unwrap();
 std::fs::write("rss_name.log", format!("{:#?}", feed))?; */
 
-// todo; have a reusable function for feeding RSS data and building the embed out of it.
-// see github.rs / esxi.rs / gportal.rs for references of this idea.
-
-async fn process_embed(
-  ctx: &Context,
-  embed: Option<CreateEmbed>,
+async fn process_regular_embed(
+  http: &Http,
+  embed: CreateEmbed,
+  redis_key: &str
+) -> KonResult<()> {
+  let redis = get_redis().await;
+  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
+
+  let msg_id_key: Option<String> = redis.get(redis_key).await?;
+
+  if let Some(msg_id_key) = msg_id_key {
+    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
+      if let Ok(mut message) = channel.message(http, msg_id).await {
+        message.edit(http, EditMessage::new().embed(embed)).await?;
+      }
+    }
+  } else {
+    let message = channel.send_message(http, CreateMessage::new().add_embed(embed)).await?;
+    redis.set(redis_key, &message.id.to_string()).await?;
+    redis.expire(redis_key, 36000).await?;
+  }
+
+  Ok(())
+}
+
+/// Cache-based embed updater for ongoing outages/incidents
+async fn process_incident_embed(
+  http: &Http,
+  embed: CreateEmbed,
   redis_key: &str,
   content_key: &str
 ) -> KonResult<()> {
-  if let Some(embed) = embed {
-    let redis = get_redis().await;
-    let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-    let msg_id_key: Option<String> = redis.get(redis_key).await?;
-    let cached_content: Option<String> = redis.get(content_key).await.unwrap_or(None);
-
-    if let Some(msg_id_key) = msg_id_key {
-      if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-        if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
-          let new_description = message.embeds[0].description.clone().unwrap();
-
-          if cached_content.as_deref() != Some(&new_description) {
-            message.edit(&ctx.http, EditMessage::new().embed(embed)).await?;
-          }
-
-          sleep(Duration::from_secs(15)).await;
+  let redis = get_redis().await;
+  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
+
+  let msg_id_key: Option<String> = redis.get(redis_key).await?;
+  let cached_content: Option<String> = redis.get(content_key).await.unwrap_or(None);
+
+  if let Some(msg_id_key) = msg_id_key {
+    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
+      if let Ok(mut message) = channel.message(http, msg_id).await {
+        if let Some(existing) = message.embeds.first() {
+          let new_description = existing.description.clone().unwrap();
+
+          if cached_content.as_deref() != Some(&new_description) {
+            message.edit(http, EditMessage::new().embed(embed)).await?;
+          }
+
+          sleep(Duration::from_secs(15)).await;
@@ -64,81 +86,94 @@ async fn process_incident_embed(
-          }
-        }
-      }
-    } else {
-      let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await?;
-      redis.set(redis_key, &message.id.to_string()).await?;
-      redis.expire(redis_key, 36000).await?;
-    }
-  }
-
-  Ok(())
-}
-
-pub async fn feed_processor(ctx: &Context) {
-  let mut log_msgs: Vec<String> = Vec::new();
-
-  match esxi_embed().await {
-    Ok(Some(embed)) => {
-      ChannelId::new(BINARY_PROPERTIES.rss_channel)
-        .send_message(&ctx.http, CreateMessage::new().add_embed(embed))
-        .await
-        .unwrap();
-    },
-    Ok(None) => (),
-    Err(y) => {
-      log_msgs.push(format!(
-        "**[{TASK_NAME}:ESXi:Error]:** Feed failed with the following error:```\n{}\n```",
-        y
-      ));
-      task_err(TASK_NAME, &y.to_string())
-    }
-  }
-
-  match gportal_embed().await {
-    Ok(Some(embed)) => process_embed(ctx, Some(embed), "RSS_GPortal_MsgID", "RSS_GPortal_Content").await.unwrap(),
-    Ok(None) => (),
-    Err(y) => {
-      log_msgs.push(format!(
-        "**[{TASK_NAME}:GPortal:Error]:** Feed failed with the following error:```\n{}\n```",
-        y
-      ));
-      task_err(TASK_NAME, &y.to_string())
-    }
-  }
-
-  match github_embed().await {
-    Ok(Some(embed)) => process_embed(ctx, Some(embed), "RSS_GitHub_MsgID", "RSS_GitHub_Content").await.unwrap(),
-    Ok(None) => (),
-    Err(y) => {
-      log_msgs.push(format!(
-        "**[{TASK_NAME}:GitHub:Error]:** Feed failed with the following error:```\n{}\n```",
-        y
-      ));
-      task_err(TASK_NAME, &y.to_string())
-    }
-  }
-
-  match rust_message().await {
-    Ok(Some(content)) => {
-      ChannelId::new(BINARY_PROPERTIES.rss_channel)
-        .send_message(&ctx.http, CreateMessage::new().content(content))
-        .await
-        .unwrap();
-    },
-    Ok(None) => (),
-    Err(y) => {
-      log_msgs.push(format!(
-        "**[{TASK_NAME}:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```",
-        y
-      ));
-      task_err(TASK_NAME, &y.to_string())
-    }
-  }
-
-  if !log_msgs.is_empty() {
-    ChannelId::new(BINARY_PROPERTIES.kon_logs)
-      .send_message(&ctx.http, CreateMessage::new().content(log_msgs.join("\n")))
-      .await
-      .unwrap();
-  }
-}
+        }
+      }
+    }
+  } else {
+    let message = channel.send_message(http, CreateMessage::new().add_embed(embed)).await?;
+    redis.set(redis_key, &message.id.to_string()).await?;
+    redis.expire(redis_key, 36000).await?;
+  }
+
+  Ok(())
+}
+
+/// Process the content string
+async fn process_msg_content(
+  http: &Http,
+  content: String,
+  redis_key: &str
+) -> KonResult<()> {
+  let redis = get_redis().await;
+  let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
+
+  let msg_id_key: Option<String> = redis.get(redis_key).await?;
+
+  if let Some(msg_id_key) = msg_id_key {
+    if let Ok(msg_id) = msg_id_key.parse::<u64>() {
+      channel.edit_message(http, msg_id, EditMessage::new().content(content)).await?;
+    }
+  } else {
+    let message = channel.send_message(http, CreateMessage::new().content(content)).await?;
+    redis.set(redis_key, &message.id.to_string()).await?;
+    redis.expire(redis_key, 36000).await?;
+  }
+
+  Ok(())
+}
+
+pub struct RSSProcessor {
+  pub feeds: Vec<RSSFeedBox>
+}
+
+impl RSSProcessor {
+  pub fn new() -> Self { Self { feeds: Vec::new() } }
+
+  pub fn add_feed(
+    &mut self,
+    feed: RSSFeedBox
+  ) {
+    self.feeds.push(feed);
+  }
+
+  pub async fn process_all(
+    &self,
+    ctx: Arc<Context>
+  ) -> KonResult<()> {
+    let mut discord_msg: Vec<String> = Vec::new();
+
+    for feed in &self.feeds {
+      let feed_name = feed.name();
+      let redis_key = format!("RSS_{feed_name}_MsgId");
+      let error_msg = format!("**[{TASK_NAME}:{feed_name}:Error]:** Feed failed with the following error:```\n{{ error }}\n```");
+
+      match feed.process(ctx.clone()).await {
+        Ok(Some(output)) => match output {
+          RSSFeedOutput::RegularEmbed(embed) => {
+            if let Err(e) = process_regular_embed(&ctx.http, embed, &redis_key).await {
+              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
+            }
+          },
+          RSSFeedOutput::IncidentEmbed(embed) => {
+            if let Err(e) = process_incident_embed(&ctx.http, embed, &redis_key, &format!("RSS_{feed_name}_Content")).await {
+              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
+            }
+          },
+          RSSFeedOutput::Content(content) => {
+            if let Err(e) = process_msg_content(&ctx.http, content, &redis_key).await {
+              discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
+            }
+          },
+        },
+        Ok(None) => (),
+        Err(e) => discord_msg.push(error_msg.replace("{{ error }}", &e.to_string()))
+      }
+    }
+
+    if !discord_msg.is_empty() {
+      ChannelId::new(BINARY_PROPERTIES.kon_logs)
+        .send_message(&ctx.http, CreateMessage::new().content(discord_msg.join("\n")))
+        .await?;
+    }
+
+    Ok(())
+  }
+}
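Taken together with rss/mod.rs above, driving the processor reduces to registering boxed feeds and calling process_all once per tick. A minimal usage sketch, assuming the hypothetical ExampleFeed from earlier and an Arc<Context> named ctx already in scope:

// Illustrative wiring only; this mirrors the registration loop in rss/mod.rs.
let mut processor = RSSProcessor::new();
processor.add_feed(Box::new(ExampleFeed::new("https://example.com/feed.xml".to_string())));

// One pass over every registered feed; per-feed errors are collected and
// reported to the log channel inside process_all rather than aborting the run.
processor.process_all(ctx.clone()).await?;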

View File

@@ -1,5 +1,6 @@
 use super::{
-  REDIS_EXPIRY_SECS,
+  RSSFeed,
+  RSSFeedOutput,
   fetch_feed,
   get_redis,
   parse,
@@ -9,54 +10,85 @@ use super::{
 use {
   kon_libs::KonResult,
+  poise::serenity_prelude::{
+    Context,
+    async_trait
+  },
   regex::Regex,
-  std::io::Cursor
+  std::{
+    io::Cursor,
+    sync::Arc
+  }
 };
 
-pub async fn rust_message() -> KonResult<Option<String>> {
-  let redis = get_redis().await;
-  let rkey = "RSS_RustBlog";
-  let url = "https://blog.rust-lang.org/feed.xml";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let article = feed.entries[0].clone();
-  let article_id = article.id.clone();
-
-  fn get_blog_title(input: String) -> Option<String> {
-    let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
-    re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
-  }
-
-  let cached_blog = redis.get(rkey).await.unwrap().unwrap_or_default();
-
-  if cached_blog.is_empty() {
-    redis.set(rkey, get_blog_title(article.id).unwrap().as_str()).await.unwrap();
-    if let Err(y) = redis.expire(rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(blog) = get_blog_title(article.id) {
-    if blog == cached_blog {
-      Ok(None)
-    } else {
-      save_to_redis(rkey, &blog).await?;
-      Ok(Some(format!(
-        "Rust Team has put out a new article!\n**[{}](<{}>)**",
-        article.links[0].title.clone().unwrap(),
-        article.links[0].href
-      )))
-    }
-  } else {
-    task_err(
-      "RSS:RustBlog",
-      &format!("Article URL does not match the expected RegEx pattern! ({})", article_id)
-    );
-    Ok(None)
-  }
-}
+pub struct RustBlog {
+  url: String
+}
+
+impl RustBlog {
+  pub fn new(url: String) -> Self { Self { url } }
+}
+
+#[async_trait]
+impl RSSFeed for RustBlog {
+  fn name(&self) -> &str { "RustBlog" }
+
+  fn url(&self) -> &str { self.url.as_str() }
+
+  async fn process(
+    &self,
+    _ctx: Arc<Context>
+  ) -> KonResult<Option<RSSFeedOutput>> {
+    let redis = get_redis().await;
+    let rkey = "RSS_RustBlog";
+
+    let res = fetch_feed(self.url()).await?;
+    let data = res.text().await?;
+    let cursor = Cursor::new(data);
+
+    let feed = parse(cursor).map_err(|e| {
+      task_err("RSS:RustBlog", &format!("Error parsing RSS feed: {e}"));
+      e
+    })?;
+
+    if feed.entries.is_empty() {
+      task_err("RSS:RustBlog", "No entries found in the feed!");
+      return Ok(None);
+    }
+
+    let article = feed.entries[0].clone();
+    let article_id = article.id.clone();
+
+    fn get_blog_title(input: String) -> Option<String> {
+      let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
+      re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
+    }
+
+    let cached_blog = redis.get(rkey).await.unwrap_or(None).unwrap_or_default();
+
+    if cached_blog.is_empty() {
+      save_to_redis(rkey, &get_blog_title(article.id).unwrap()).await?;
+      return Ok(None);
+    }
+
+    if let Some(blog_title) = get_blog_title(article.id) {
+      if blog_title == cached_blog {
+        Ok(None)
+      } else {
+        save_to_redis(rkey, &blog_title).await?;
+        Ok(Some(RSSFeedOutput::Content(format!(
+          "Rust Team has put out a new article!\n**[{}](<{}>)**",
+          article.links[0].title.clone().unwrap(),
+          article.links[0].href
+        ))))
+      }
+    } else {
+      task_err(
+        "RSS:RustBlog",
+        &format!("Article URL does not match the expected RegEx pattern! ({article_id})")
      );
+      Ok(None)
+    }
+  }
+}