Restructure the RSS task

parent f9471e471b
commit 1fa75a4d6f

Cargo.lock (generated):
@@ -1023,7 +1023,7 @@ dependencies = [
 [[package]]
 name = "kon"
-version = "0.3.15"
+version = "0.3.16"
 dependencies = [
  "bb8",
  "bb8-postgres",

Cargo.toml:
@@ -1,6 +1,6 @@
 [package]
 name = "kon"
-version = "0.3.15"
+version = "0.3.16"
 edition = "2021"
 
 [dependencies]

controllers::cache (RedisController):
@@ -72,6 +72,11 @@ impl RedisController {
     conn.get(key).await
   }
 
+  pub async fn del(&self, key: &str) -> RedisResult<()> {
+    let mut conn = self.pool.get().await.unwrap();
+    conn.del(key).await
+  }
+
   /// Set a key with a value in the cache
   pub async fn set(&self, key: &str, value: &str) -> RedisResult<()> {
     let mut conn = self.pool.get().await.unwrap();
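A quick sketch of how the new del method is meant to be called, using the same pool-checkout pattern as get and set. The caller and key name below are hypothetical; the RedisController construction is elided:

    // Hedged sketch: a hypothetical caller of the new del() method.
    // Assumes an already-constructed RedisController from this module.
    async fn clear_tracked_message(redis: &RedisController) {
      let rkey = "RSS_Example_MsgID"; // hypothetical key name
      if let Err(y) = redis.del(rkey).await {
        eprintln!("[RedisDel]: {}", y);
      }
    }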
src/internals/tasks/rss (module root):

@@ -1,3 +1,10 @@
+mod processor; // Processes the feeds and sends them off to Discord
+
+mod esxi;
+mod github;
+mod gportal;
+mod rust_msg;
+
 use crate::{
   Error,
   controllers::cache::RedisController
@@ -15,17 +22,10 @@ use once_cell::sync::OnceCell;
 use feed_rs::parser::parse;
 use reqwest::Response;
 use regex::Regex;
-use std::{
-  sync::Arc,
-  io::Cursor
-};
+use std::sync::Arc;
 use poise::serenity_prelude::{
   Context,
-  ChannelId,
-  EditMessage,
-  CreateMessage,
   CreateEmbed,
-  CreateEmbedAuthor,
   Timestamp
 };
 use tokio::time::{
@@ -33,6 +33,7 @@ use tokio::time::{
   interval
 };
 
+const TASK_NAME: &str = "RSS";
 static REDIS_EXPIRY_SECS: i64 = 7200;
 static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();
 
@@ -48,12 +49,6 @@ async fn get_redis() -> Arc<RedisController> {
   REDIS_SERVICE.get().unwrap().clone()
 }
 
-// Moved up here as a copy-paste
-
-// This is for building up the embed with the feed data
-// std::fs::File::create("rss_name.log").unwrap();
-// std::fs::write("rss_name.log", format!("{:#?}", feed))?;
-
 fn format_href_to_discord(input: &str) -> String {
   let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
   re.replace_all(input, r"[$2]($1)").to_string()
@@ -62,23 +57,17 @@ fn format_href_to_discord(input: &str) -> String {
 fn format_html_to_discord(input: String) -> String {
   let mut output = input;
 
-  // Replace all instances of <p> with newlines
-  output = Regex::new(r#"<\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
-  output = Regex::new(r#"<\s*/\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
+  // Replace all instances of <p> and </p> with newlines
+  output = Regex::new(r#"</?\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
 
   // Replace all instances of <br> and <br /> with newlines
-  output = Regex::new(r#"<\s*br\s*>"#).unwrap().replace_all(&output, "\n").to_string();
-  output = Regex::new(r#"<\s*br\s*/\s*>"#).unwrap().replace_all(&output, "\n").to_string();
+  output = Regex::new(r#"<\s*br\s*/?\s*>"#).unwrap().replace_all(&output, "\n").to_string();
 
   // Replace all instances of <strong> with **
-  output = Regex::new(r#"<\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
-  output = Regex::new(r#"<\s*/\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
+  output = Regex::new(r#"</?\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
 
   // Replace all instances of <var> and <small> with nothing
-  output = Regex::new(r#"<\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
-  output = Regex::new(r#"<\s*/\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
-  output = Regex::new(r#"<\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();
-  output = Regex::new(r#"<\s*/\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();
+  output = Regex::new(r#"</?\s*(var|small)\s*>"#).unwrap().replace_all(&output, "").to_string();
 
   // Remove any other HTML tags
   output = Regex::new(r#"<[^>]+>"#).unwrap().replace_all(&output, "").to_string();
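The tag-stripping passes above are folded into single patterns. A standalone sanity check of the consolidated regexes, with inputs invented for illustration:

    use regex::Regex;

    fn main() {
      // </?\s*p\s*> now catches <p> and </p> in one pass
      let p = Regex::new(r#"</?\s*p\s*>"#).unwrap();
      assert_eq!(p.replace_all("<p>hello</p>", "\n"), "\nhello\n");

      // <\s*br\s*/?\s*> covers both <br> and <br />
      let br = Regex::new(r#"<\s*br\s*/?\s*>"#).unwrap();
      assert_eq!(br.replace_all("a<br>b<br />c", "\n"), "a\nb\nc");

      // </?\s*(var|small)\s*> strips all four tag variants
      let vs = Regex::new(r#"</?\s*(var|small)\s*>"#).unwrap();
      assert_eq!(vs.replace_all("<var>x</var> <small>y</small>", ""), "x y");
    }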
@@ -132,410 +121,21 @@ fn trim_old_content(s: &str) -> String {
   }
 }
 
-async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
-  let redis = get_redis().await;
-  let rkey = "RSS_ESXi";
-  let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let home_page = feed.links[0].clone().href;
-  let article = feed.entries[0].clone();
-
-  fn get_patch_version(input: &str) -> Option<String> {
-    let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
-
-    if let Some(caps) = re.captures(input) {
-      let update_num = caps[1].to_string();
-      let letter = caps.get(2).map_or("", |m| m.as_str());
-      Some(format!("Update {}{}", update_num, letter))
-    } else {
-      None
-    }
-  }
-
-  let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();
-
-  if cached_patch.is_empty() {
-    redis.set(&rkey, &article.categories[3].term).await.unwrap();
-    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(patch) = get_patch_version(&article.categories[3].term) {
-    if patch == cached_patch {
-      return Ok(None);
-    } else {
-      save_to_redis(&rkey, &article.categories[3].term).await?;
-      Ok(Some(CreateEmbed::new()
-        .color(0x4EFBCB)
-        .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
-        .thumbnail(feed.logo.unwrap().uri)
-        .description(format!(
-          "{} {} for {} {} has been rolled out!\n{}",
-          article.categories[2].term,
-          article.categories[3].term,
-          article.categories[0].term,
-          article.categories[1].term,
-          format_href_to_discord(article.summary.unwrap().content.as_str())
-        ))
-        .timestamp(Timestamp::from(article.updated.unwrap())))
-      )
-    }
-  } else {
-    task_err("RSS:ESXi", &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term.as_str()));
-    Ok(None)
-  }
-}
-
-async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
-  let redis = get_redis().await;
-  let rkey = "RSS_GPortal";
-  let rkey_content = format!("{}_Content", rkey);
-  let url = "https://status.g-portal.com/history.atom";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let incident_page = feed.links[0].clone().href;
-  let article = feed.entries[0].clone();
-
-  fn get_incident_id(input: &str) -> Option<String> {
-    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
-
-    if let Some(caps) = re.captures(input) {
-      Some(caps[1].to_string())
-    } else {
-      None
-    }
-  }
-
-  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
-  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
-
-  let color: u32;
-  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
-  let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
-  let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
-  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
-  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
-
-  let first_entry = date_patt.split(&new_content).next().unwrap_or(&new_content);
-
-  if update_patt.is_match(&first_entry) {
-    color = 0xFFAD33;
-  } else if investigating_patt.is_match(&first_entry) {
-    color = 0x16AAEB;
-  } else if monitoring_patt.is_match(&first_entry) {
-    color = 0x243C32;
-  } else if resolved_patt.is_match(&first_entry) {
-    color = 0x57F287;
-  } else {
-    color = 0x243C32;
-  }
-
-  if cached_incident.is_empty() {
-    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
-    redis.set(&rkey_content, &new_content).await.unwrap();
-    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(incident) = get_incident_id(&article.links[0].href) {
-    if incident == cached_incident {
-      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
-      if cached_content == new_content {
-        return Ok(None);
-      } else {
-        redis.set(&rkey_content, &new_content).await.unwrap();
-        redis.expire(&rkey_content, 21600).await.unwrap();
-        return Ok(Some(embed(
-          color,
-          article.title.unwrap().content,
-          incident_page,
-          trim_old_content(&new_content),
-          Timestamp::from(article.updated.unwrap())
-        )));
-      }
-    } else {
-      save_to_redis(&rkey, &incident).await?;
-      redis.set(&rkey_content, &new_content).await.unwrap();
-      return Ok(Some(embed(
-        color,
-        article.title.unwrap().content,
-        incident_page,
-        trim_old_content(&new_content),
-        Timestamp::from(article.updated.unwrap())
-      )));
-    }
-  } else {
-    task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
-    Ok(None)
-  }
-}
-
-async fn github_embed() -> Result<Option<CreateEmbed>, Error> {
-  let redis = get_redis().await;
-  let rkey = "RSS_GitHub";
-  let rkey_content = format!("{}_Content", rkey);
-  let url = "https://www.githubstatus.com/history.atom";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let incident_page = feed.entries[0].links[0].clone().href;
-  let article = feed.entries[0].clone();
-
-  fn get_incident_id(input: &str) -> Option<String> {
-    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
-
-    if let Some(caps) = re.captures(input) {
-      Some(caps[1].to_string())
-    } else {
-      None
-    }
-  }
-
-  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
-  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
-
-  let color: u32;
-  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
-  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
-  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
-
-  let first_entry = date_patt.split(&new_content).next().unwrap_or(&new_content);
-
-  if update_patt.is_match(&first_entry) {
-    color = 0xFFAD33;
-  } else if resolved_patt.is_match(&first_entry) {
-    color = 0x57F287;
-  } else {
-    color = 0x243C32;
-  }
-
-  if cached_incident.is_empty() {
-    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
-    redis.set(&rkey_content, &new_content).await.unwrap();
-    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(incident) = get_incident_id(&article.links[0].href) {
-    if incident == cached_incident {
-      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
-      if cached_content == new_content {
-        return Ok(None);
-      } else {
-        redis.set(&rkey_content, &new_content).await.unwrap();
-        redis.expire(&rkey_content, 21600).await.unwrap();
-        return Ok(Some(embed(
-          color,
-          article.title.unwrap().content,
-          incident_page,
-          trim_old_content(&new_content),
-          Timestamp::from(article.updated.unwrap())
-        )));
-      }
-    } else {
-      save_to_redis(&rkey, &incident).await?;
-      redis.set(&rkey_content, &new_content).await.unwrap();
-      return Ok(Some(embed(
-        color,
-        article.title.unwrap().content,
-        incident_page,
-        trim_old_content(&new_content),
-        Timestamp::from(article.updated.unwrap())
-      )));
-    }
-  } else {
-    task_err("RSS:GitHub", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
-    Ok(None)
-  }
-}
-
-async fn rust_message() -> Result<Option<String>, Error> {
-  let redis = get_redis().await;
-  let rkey = "RSS_RustBlog";
-  let url = "https://blog.rust-lang.org/feed.xml";
-
-  let res = fetch_feed(url).await?;
-  let data = res.text().await?;
-  let cursor = Cursor::new(data);
-
-  let feed = parse(cursor).unwrap();
-  let article = feed.entries[0].clone();
-  let article_id = article.id.clone();
-
-  fn get_blog_title(input: String) -> Option<String> {
-    let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
-    re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
-  }
-
-  let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();
-
-  if cached_blog.is_empty() {
-    redis.set(&rkey, get_blog_title(article.id).unwrap().as_str()).await.unwrap();
-    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
-      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
-    }
-    return Ok(None);
-  }
-
-  if let Some(blog) = get_blog_title(article.id) {
-    if blog == cached_blog {
-      return Ok(None);
-    } else {
-      save_to_redis(&rkey, &blog).await?;
-      Ok(Some(format!("Rust Team has put out a new article!\n**[{}](<{}>)**", article.links[0].title.clone().unwrap(), article.links[0].href)))
-    }
-  } else {
-    task_err("RSS:RustBlog", &format!("Article URL does not match the expected RegEx pattern! ({})", article_id));
-    Ok(None)
-  }
-}
-
 pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
-  let task_name = "RSS";
   #[cfg(feature = "production")]
   let mut interval = interval(Duration::from_secs(300)); // Check feeds every 5 mins
   #[cfg(not(feature = "production"))]
   let mut interval = interval(Duration::from_secs(30)); // Check feeds every 30 secs
-  task_info(&task_name, "Task loaded!");
+  let mut first_run = true;
+  task_info(&TASK_NAME, "Task loaded!");
 
   loop {
     interval.tick().await;
-    let mut log_msgs: Vec<String> = Vec::new();
-
-    match esxi_embed().await {
-      Ok(Some(embed)) => {
-        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
-      },
-      Ok(None) => (),
-      Err(y) => {
-        log_msgs.push(format!("**[RSS:ESXi:Error]:** Feed failed with the following error:```\n{}\n```", y));
-        task_err(&task_name, &y.to_string())
-      }
-    }
-
-    match gportal_embed().await {
-      Ok(Some(embed)) => {
-        let redis = get_redis().await;
-        let rkey = "RSS_GPortal_MsgID";
-        let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-        // Check if the message ID is in Redis
-        match redis.get(&rkey).await {
-          Ok(Some(msg_id_key)) => {
-            // Fetch the cached content
-            let cached_content: Option<String> = redis.get("RSS_GPortal_Content").await.unwrap_or(None);
-
-            if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-              // Attempt to edit the message
-              if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
-                let new_desc = message.embeds[0].description.clone().unwrap();
-
-                if cached_content.as_deref() != Some(&new_desc) {
-                  message.edit(&ctx.http, EditMessage::new().embed(embed)).await.unwrap();
-                }
-              }
-            } else {
-              // If the message ID is invalid, send a new message instead
-              let message = channel.send_message(&ctx.http, CreateMessage::new()
-                .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
-              ).await.unwrap();
-              redis.set(&rkey, &message.id.to_string()).await.unwrap();
-              redis.expire(&rkey, 36000).await.unwrap();
-            }
-          },
-          Ok(None) | Err(_) => {
-            // If the message is not found, send a new message instead
-            let message = channel.send_message(&ctx.http, CreateMessage::new()
-              .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
-            ).await.unwrap();
-            redis.set(&rkey, &message.id.to_string()).await.unwrap();
-            redis.expire(&rkey, 36000).await.unwrap();
-          }
-        }
-      },
-      Ok(None) => (),
-      Err(y) => {
-        log_msgs.push(format!("**[RSS:GPortal:Error]:** Feed failed with the following error:```\n{}\n```", y));
-        task_err(&task_name, &y.to_string())
-      }
-    }
-
-    match github_embed().await {
-      Ok(Some(embed)) => {
-        let redis = get_redis().await;
-        let rkey = "RSS_GitHub_MsgID";
-        let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-        // Check if the message ID is in Redis
-        match redis.get(&rkey).await {
-          Ok(Some(msg_id_key)) => {
-            // Fetch the cached content
-            let cached_content: Option<String> = redis.get("RSS_GitHub_Content").await.unwrap_or(None);
-
-            if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-              // Attempt to edit the message
-              if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
-                let new_desc = message.embeds[0].description.clone().unwrap();
-
-                if cached_content.as_deref() != Some(&new_desc) {
-                  message.edit(&ctx.http, EditMessage::new().embed(embed)).await.unwrap();
-                }
-              }
-            } else {
-              // If the message ID is invalid, send a new message instead
-              let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
-              redis.set(&rkey, &message.id.to_string()).await.unwrap();
-              redis.expire(&rkey, 36000).await.unwrap();
-            }
-          },
-          Ok(None) | Err(_) => {
-            // If the message is not found, send a new message instead
-            let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
-            redis.set(&rkey, &message.id.to_string()).await.unwrap();
-            redis.expire(&rkey, 36000).await.unwrap();
-          }
-        }
-      },
-      Ok(None) => (),
-      Err(y) => {
-        log_msgs.push(format!("**[RSS:GitHub:Error]:** Feed failed with the following error:```\n{}\n```", y));
-        task_err(&task_name, &y.to_string())
-      }
-    }
-
-    match rust_message().await {
-      Ok(Some(content)) => {
-        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
-      },
-      Ok(None) => (),
-      Err(y) => {
-        log_msgs.push(format!("**[RSS:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```", y));
-        task_err(&task_name, &y.to_string())
-      }
-    }
-
-    if !log_msgs.is_empty() {
-      ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
-        &ctx.http, CreateMessage::new().content(log_msgs.join("\n"))
-      ).await.unwrap();
-    }
+    if first_run {
+      task_info(&format!("{TASK_NAME}:Processor"), "Starting up!");
+      first_run = false;
+    }
+
+    processor::feed_processor(&ctx).await;
   }
 }
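The entry point above now only schedules; the feed handling moves into processor::feed_processor (new file below). A self-contained sketch of the same interval-plus-first-run shape, with stub names rather than the crate's API (needs tokio with the time and macros features):

    use tokio::time::{interval, Duration};

    #[tokio::main]
    async fn main() {
      let mut ticker = interval(Duration::from_secs(30));
      let mut first_run = true;

      loop {
        ticker.tick().await; // the first tick completes immediately
        if first_run {
          println!("Starting up!");
          first_run = false;
        }
        println!("processing feeds..."); // stand-in for processor::feed_processor(&ctx)
      }
    }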
src/internals/tasks/rss/esxi.rs (new file, 79 lines):

@@ -0,0 +1,79 @@
+use crate::Error;
+use super::{
+  super::task_err,
+  REDIS_EXPIRY_SECS,
+  get_redis,
+  save_to_redis,
+  fetch_feed,
+  parse,
+  format_href_to_discord
+};
+
+use std::io::Cursor;
+use regex::Regex;
+use poise::serenity_prelude::{
+  CreateEmbed,
+  CreateEmbedAuthor,
+  Timestamp
+};
+
+pub async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
+  let redis = get_redis().await;
+  let rkey = "RSS_ESXi";
+  let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";
+
+  let res = fetch_feed(url).await?;
+  let data = res.text().await?;
+  let cursor = Cursor::new(data);
+
+  let feed = parse(cursor).unwrap();
+  let home_page = feed.links[0].clone().href;
+  let article = feed.entries[0].clone();
+
+  fn get_patch_version(input: &str) -> Option<String> {
+    let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
+
+    if let Some(caps) = re.captures(input) {
+      let update_num = caps[1].to_string();
+      let letter = caps.get(2).map_or("", |m| m.as_str());
+      Some(format!("Update {}{}", update_num, letter))
+    } else {
+      None
+    }
+  }
+
+  let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();
+
+  if cached_patch.is_empty() {
+    redis.set(&rkey, &article.categories[3].term).await.unwrap();
+    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
+      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
+    }
+    return Ok(None);
+  }
+
+  if let Some(patch) = get_patch_version(&article.categories[3].term) {
+    if patch == cached_patch {
+      return Ok(None);
+    } else {
+      save_to_redis(&rkey, &article.categories[3].term).await?;
+      Ok(Some(CreateEmbed::new()
+        .color(0x4EFBCB)
+        .author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
+        .thumbnail(feed.logo.unwrap().uri)
+        .description(format!(
+          "{} {} for {} {} has been rolled out!\n{}",
+          article.categories[2].term,
+          article.categories[3].term,
+          article.categories[0].term,
+          article.categories[1].term,
+          format_href_to_discord(article.summary.unwrap().content.as_str())
+        ))
+        .timestamp(Timestamp::from(article.updated.unwrap())))
+      )
+    }
+  } else {
+    task_err("RSS:ESXi", &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term.as_str()));
+    Ok(None)
+  }
+}
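For reference, a standalone check of what get_patch_version extracts; the sample term is invented, the real input comes from article.categories[3].term:

    use regex::Regex;

    fn main() {
      let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
      let caps = re.captures("ESXi 7.0 Update 3c").unwrap(); // invented sample term
      let letter = caps.get(2).map_or("", |m| m.as_str());
      assert_eq!(format!("Update {}{}", &caps[1], letter), "Update 3c");
    }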
src/internals/tasks/rss/github.rs (new file, 103 lines):

@@ -0,0 +1,103 @@
+use crate::Error;
+use super::{
+  super::task_err,
+  REDIS_EXPIRY_SECS,
+  get_redis,
+  save_to_redis,
+  fetch_feed,
+  parse,
+  embed,
+  trim_old_content,
+  format_html_to_discord
+};
+
+use std::io::Cursor;
+use regex::Regex;
+use poise::serenity_prelude::{
+  CreateEmbed,
+  Timestamp
+};
+
+pub async fn github_embed() -> Result<Option<CreateEmbed>, Error> {
+  let redis = get_redis().await;
+  let rkey = "RSS_GitHub";
+  let rkey_content = format!("{}_Content", rkey);
+  let url = "https://www.githubstatus.com/history.atom";
+
+  let res = fetch_feed(url).await?;
+  let data = res.text().await?;
+  let cursor = Cursor::new(data);
+
+  let feed = parse(cursor).unwrap();
+  let incident_page = feed.entries[0].links[0].clone().href;
+  let article = feed.entries[0].clone();
+
+  fn get_incident_id(input: &str) -> Option<String> {
+    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
+
+    if let Some(caps) = re.captures(input) {
+      Some(caps[1].to_string())
+    } else {
+      None
+    }
+  }
+
+  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
+  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
+
+  let color: u32;
+  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
+  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
+  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
+
+  let first_entry = date_patt.split(&new_content).next().unwrap_or(&new_content);
+
+  if update_patt.is_match(&first_entry) {
+    color = 0xFFAD33;
+  } else if resolved_patt.is_match(&first_entry) {
+    color = 0x57F287;
+  } else {
+    color = 0x243C32;
+  }
+
+  if cached_incident.is_empty() {
+    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
+    redis.set(&rkey_content, &new_content).await.unwrap();
+    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
+      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
+    }
+    return Ok(None);
+  }
+
+  if let Some(incident) = get_incident_id(&article.links[0].href) {
+    if incident == cached_incident {
+      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
+      if cached_content == new_content {
+        return Ok(None);
+      } else {
+        redis.set(&rkey_content, &new_content).await.unwrap();
+        redis.expire(&rkey_content, 21600).await.unwrap();
+        return Ok(Some(embed(
+          color,
+          article.title.unwrap().content,
+          incident_page,
+          trim_old_content(&new_content),
+          Timestamp::from(article.updated.unwrap())
+        )));
+      }
+    } else {
+      save_to_redis(&rkey, &incident).await?;
+      redis.set(&rkey_content, &new_content).await.unwrap();
+      return Ok(Some(embed(
+        color,
+        article.title.unwrap().content,
+        incident_page,
+        trim_old_content(&new_content),
+        Timestamp::from(article.updated.unwrap())
+      )));
+    }
+  } else {
+    task_err("RSS:GitHub", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
+    Ok(None)
+  }
+}
src/internals/tasks/rss/gportal.rs (new file, 109 lines):

@@ -0,0 +1,109 @@
+use crate::Error;
+use super::{
+  super::task_err,
+  REDIS_EXPIRY_SECS,
+  get_redis,
+  save_to_redis,
+  fetch_feed,
+  parse,
+  embed,
+  trim_old_content,
+  format_html_to_discord
+};
+
+use std::io::Cursor;
+use regex::Regex;
+use poise::serenity_prelude::{
+  CreateEmbed,
+  Timestamp
+};
+
+pub async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
+  let redis = get_redis().await;
+  let rkey = "RSS_GPortal";
+  let rkey_content = format!("{}_Content", rkey);
+  let url = "https://status.g-portal.com/history.atom";
+
+  let res = fetch_feed(url).await?;
+  let data = res.text().await?;
+  let cursor = Cursor::new(data);
+
+  let feed = parse(cursor).unwrap();
+  let incident_page = feed.links[0].clone().href;
+  let article = feed.entries[0].clone();
+
+  fn get_incident_id(input: &str) -> Option<String> {
+    let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
+
+    if let Some(caps) = re.captures(input) {
+      Some(caps[1].to_string())
+    } else {
+      None
+    }
+  }
+
+  let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
+  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
+
+  let color: u32;
+  let update_patt = Regex::new(r"(?i)\bupdate\b").unwrap();
+  let investigating_patt = Regex::new(r"(?i)\binvestigating\b").unwrap();
+  let monitoring_patt = Regex::new(r"(?i)\bmonitoring\b").unwrap();
+  let resolved_patt = Regex::new(r"(?i)\bresolved\b").unwrap();
+  let date_patt = Regex::new(r"\b[A-Z][a-z]{2} \d{2}, \d{2}:\d{2} UTC\b").unwrap();
+
+  let first_entry = date_patt.split(&new_content).next().unwrap_or(&new_content);
+
+  if update_patt.is_match(&first_entry) {
+    color = 0xFFAD33;
+  } else if investigating_patt.is_match(&first_entry) {
+    color = 0x16AAEB;
+  } else if monitoring_patt.is_match(&first_entry) {
+    color = 0x243C32;
+  } else if resolved_patt.is_match(&first_entry) {
+    color = 0x57F287;
+  } else {
+    color = 0x243C32;
+  }
+
+  if cached_incident.is_empty() {
+    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
+    redis.set(&rkey_content, &new_content).await.unwrap();
+    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
+      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
+    }
+    return Ok(None);
+  }
+
+  if let Some(incident) = get_incident_id(&article.links[0].href) {
+    if incident == cached_incident {
+      let cached_content: String = redis.get(&rkey_content).await.unwrap().unwrap_or_default();
+      if cached_content == new_content {
+        return Ok(None);
+      } else {
+        redis.set(&rkey_content, &new_content).await.unwrap();
+        redis.expire(&rkey_content, 21600).await.unwrap();
+        return Ok(Some(embed(
+          color,
+          article.title.unwrap().content,
+          incident_page,
+          trim_old_content(&new_content),
+          Timestamp::from(article.updated.unwrap())
+        )));
+      }
+    } else {
+      save_to_redis(&rkey, &incident).await?;
+      redis.set(&rkey_content, &new_content).await.unwrap();
+      return Ok(Some(embed(
+        color,
+        article.title.unwrap().content,
+        incident_page,
+        trim_old_content(&new_content),
+        Timestamp::from(article.updated.unwrap())
+      )));
+    }
+  } else {
+    task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
+    Ok(None)
+  }
+}
src/internals/tasks/rss/processor.rs (new file, 154 lines):

@@ -0,0 +1,154 @@
+use super::{
+  task_err,
+  TASK_NAME,
+  BINARY_PROPERTIES,
+  get_redis,
+  esxi::esxi_embed,
+  github::github_embed,
+  gportal::gportal_embed,
+  rust_msg::rust_message
+};
+
+use regex::Regex;
+use poise::serenity_prelude::{
+  Context,
+  ChannelId,
+  EditMessage,
+  CreateMessage
+};
+
+// This is for building up the embed with the feed data
+/* std::fs::File::create("rss_name.log").unwrap();
+   std::fs::write("rss_name.log", format!("{:#?}", feed))?; */
+
+pub async fn feed_processor(ctx: &Context) {
+  let mut log_msgs: Vec<String> = Vec::new();
+
+  match esxi_embed().await {
+    Ok(Some(embed)) => {
+      ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
+    },
+    Ok(None) => (),
+    Err(y) => {
+      log_msgs.push(format!("**[{TASK_NAME}:ESXi:Error]:** Feed failed with the following error:```\n{}\n```", y));
+      task_err(&TASK_NAME, &y.to_string())
+    }
+  }
+
+  match gportal_embed().await {
+    Ok(Some(embed)) => {
+      let redis = get_redis().await;
+      let rkey = "RSS_GPortal_MsgID";
+      let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
+
+      // Check if the message ID is in Redis
+      match redis.get(&rkey).await {
+        Ok(Some(msg_id_key)) => {
+          // Fetch the cached content
+          let cached_content: Option<String> = redis.get("RSS_GPortal_Content").await.unwrap_or(None);
+
+          if let Ok(msg_id) = msg_id_key.parse::<u64>() {
+            // Attempt to edit the message
+            if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
+              let new_desc = message.embeds[0].description.clone().unwrap();
+
+              if cached_content.as_deref() != Some(&new_desc) {
+                message.edit(&ctx.http, EditMessage::new().embed(embed)).await.unwrap();
+              }
+
+              if Regex::new(r"(?i)\bresolved\b").unwrap().is_match(&new_desc) {
+                message.reply(&ctx.http, "This incident has been marked as resolved!").await.unwrap();
+                redis.del(&rkey).await.unwrap();
+              }
+            }
+          } else {
+            // If the message ID is invalid, send a new message instead
+            let message = channel.send_message(&ctx.http, CreateMessage::new()
+              .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
+            ).await.unwrap();
+            redis.set(&rkey, &message.id.to_string()).await.unwrap();
+            redis.expire(&rkey, 36000).await.unwrap();
+          }
+        },
+        Ok(None) | Err(_) => {
+          // If the message is not found, send a new message instead
+          let message = channel.send_message(&ctx.http, CreateMessage::new()
+            .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
+          ).await.unwrap();
+          redis.set(&rkey, &message.id.to_string()).await.unwrap();
+          redis.expire(&rkey, 36000).await.unwrap();
+        }
+      }
+    },
+    Ok(None) => (),
+    Err(y) => {
+      log_msgs.push(format!("**[{TASK_NAME}:GPortal:Error]:** Feed failed with the following error:```\n{}\n```", y));
+      task_err(&TASK_NAME, &y.to_string())
+    }
+  }
+
+  match github_embed().await {
+    Ok(Some(embed)) => {
+      let redis = get_redis().await;
+      let rkey = "RSS_GitHub_MsgID";
+      let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
+
+      // Check if the message ID is in Redis
+      match redis.get(&rkey).await {
+        Ok(Some(msg_id_key)) => {
+          // Fetch the cached content
+          let cached_content: Option<String> = redis.get("RSS_GitHub_Content").await.unwrap_or(None);
+
+          if let Ok(msg_id) = msg_id_key.parse::<u64>() {
+            // Attempt to edit the message
+            if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
+              let new_desc = message.embeds[0].description.clone().unwrap();
+
+              if cached_content.as_deref() != Some(&new_desc) {
+                message.edit(&ctx.http, EditMessage::new().embed(embed)).await.unwrap();
+              }
+
+              if Regex::new(r"(?i)\bresolved\b").unwrap().is_match(&new_desc) {
+                message.reply(&ctx.http, "This incident has been marked as resolved!").await.unwrap();
+                redis.del(&rkey).await.unwrap();
+              }
+            }
+          } else {
+            // If the message ID is invalid, send a new message instead
+            let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
+            redis.set(&rkey, &message.id.to_string()).await.unwrap();
+            redis.expire(&rkey, 36000).await.unwrap();
+          }
+        },
+        Ok(None) | Err(_) => {
+          // If the message is not found, send a new message instead
+          let message = channel.send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
+          redis.set(&rkey, &message.id.to_string()).await.unwrap();
+          redis.expire(&rkey, 36000).await.unwrap();
+        }
+      }
+    },
+    Ok(None) => (),
+    Err(y) => {
+      log_msgs.push(format!("**[{TASK_NAME}:GitHub:Error]:** Feed failed with the following error:```\n{}\n```", y));
+      task_err(&TASK_NAME, &y.to_string())
+    }
+  }
+
+  match rust_message().await {
+    Ok(Some(content)) => {
+      ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
+    },
+    Ok(None) => (),
+    Err(y) => {
+      log_msgs.push(format!("**[{TASK_NAME}:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```", y));
+      task_err(&TASK_NAME, &y.to_string())
+    }
+  }
+
+  if !log_msgs.is_empty() {
+    ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+      &ctx.http, CreateMessage::new().content(log_msgs.join("\n"))
+    ).await.unwrap();
+  }
+}
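The notable new behavior in the processor is the resolved-incident cleanup: once a tracked embed's description matches the resolved pattern, the bot replies to the status message and drops the tracked message ID via the new del. A standalone check of that detection regex, with invented sample strings:

    use regex::Regex;

    fn main() {
      let resolved = Regex::new(r"(?i)\bresolved\b").unwrap();
      assert!(resolved.is_match("This incident has been Resolved."));
      assert!(!resolved.is_match("Unresolved issues remain")); // \b blocks substring hits
    }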
src/internals/tasks/rss/rust_msg.rs (new file, 53 lines):

@@ -0,0 +1,53 @@
+use crate::Error;
+use super::{
+  task_err,
+  REDIS_EXPIRY_SECS,
+  get_redis,
+  save_to_redis,
+  fetch_feed,
+  parse
+};
+
+use std::io::Cursor;
+use regex::Regex;
+
+pub async fn rust_message() -> Result<Option<String>, Error> {
+  let redis = get_redis().await;
+  let rkey = "RSS_RustBlog";
+  let url = "https://blog.rust-lang.org/feed.xml";
+
+  let res = fetch_feed(url).await?;
+  let data = res.text().await?;
+  let cursor = Cursor::new(data);
+
+  let feed = parse(cursor).unwrap();
+  let article = feed.entries[0].clone();
+  let article_id = article.id.clone();
+
+  fn get_blog_title(input: String) -> Option<String> {
+    let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
+    re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
+  }
+
+  let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();
+
+  if cached_blog.is_empty() {
+    redis.set(&rkey, get_blog_title(article.id).unwrap().as_str()).await.unwrap();
+    if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
+      task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
+    }
+    return Ok(None);
+  }
+
+  if let Some(blog) = get_blog_title(article.id) {
+    if blog == cached_blog {
+      return Ok(None);
+    } else {
+      save_to_redis(&rkey, &blog).await?;
+      Ok(Some(format!("Rust Team has put out a new article!\n**[{}](<{}>)**", article.links[0].title.clone().unwrap(), article.links[0].href)))
+    }
+  } else {
+    task_err("RSS:RustBlog", &format!("Article URL does not match the expected RegEx pattern! ({})", article_id));
+    Ok(None)
+  }
+}
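For reference, what get_blog_title captures from a feed entry ID; the sample URL below just follows the blog's path scheme and is invented:

    use regex::Regex;

    fn main() {
      let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
      let id = "https://blog.rust-lang.org/2024/05/01/some-post.html"; // invented sample
      let title = re.captures(id).and_then(|c| c.get(1).map(|m| m.as_str()));
      assert_eq!(title, Some("2024/05/01/some-post.html"));
    }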