Compare commits
No commits in common. "483ba390e9e300c373607ec86db5f744644e5e84" and "f078eff53e2ac142c18ca5bb66f9ece9cf19e928" have entirely different histories.
483ba390e9 ... f078eff53e
Cargo.lock (generated): 6 changed lines
@@ -182,9 +182,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "bytes"
-version = "1.7.1"
+version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50"
+checksum = "fca2be1d5c43812bae364ee3f30b3afcb7877cf59f4aeb94c66f313a41d2fac9"
 
 [[package]]
 name = "camino"
@@ -1020,7 +1020,7 @@ dependencies = [
 
 [[package]]
 name = "kon"
-version = "0.3.8"
+version = "0.3.7"
 dependencies = [
  "bb8",
  "bb8-postgres",
@@ -1,6 +1,6 @@
 [package]
 name = "kon"
-version = "0.3.8"
+version = "0.3.7"
 edition = "2021"
 
 [dependencies]
@@ -15,8 +15,8 @@ poise = "0.6.1"
 regex = "1.10.5"
 reqwest = { version = "0.12.5", features = ["json"] }
 serde = "1.0.204"
-serde_json = "1.0.122"
-sysinfo = "0.31.2"
+serde_json = "1.0.120"
+sysinfo = "0.31.0"
 tokenservice-client = { version = "0.3.3", registry = "gitea" }
 tokio = { version = "1.39.2", features = ["macros", "signal", "rt-multi-thread"] }
 tokio-postgres = "0.7.11"
@@ -1,5 +1,6 @@
 use crate::internals::utils::token_path;
 
+use poise::serenity_prelude::prelude::TypeMapKey;
 use bb8_redis::{
   bb8::Pool,
   redis::cmd,
@@ -18,6 +19,10 @@ pub struct RedisController {
   pool: Pool<RedisConnectionManager>
 }
 
+impl TypeMapKey for RedisController {
+  type Value = RedisController;
+}
+
 impl RedisController {
   pub async fn new() -> Result<Self, RedisError> {
     let manager = RedisConnectionManager::new(token_path().await.redis_uri.as_str())?;
@@ -22,7 +22,6 @@ use std::{
 use poise::serenity_prelude::{
   Context,
   ChannelId,
-  EditMessage,
   CreateMessage,
   CreateEmbed,
   CreateEmbedAuthor,
@@ -48,12 +47,6 @@ async fn get_redis() -> Arc<RedisController> {
   REDIS_SERVICE.get().unwrap().clone()
 }
 
-// Moved up here as a copy-paste
-
-// This is for building up the embed with the feed data
-// std::fs::File::create("rss_name.log").unwrap();
-// std::fs::write("rss_name.log", format!("{:#?}", feed))?;
-
 fn format_href_to_discord(input: &str) -> String {
   let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
   re.replace_all(input, r"[$2]($1)").to_string()
@@ -133,6 +126,10 @@ async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
     }
   }
 
+  // This is for building up the embed with the feed data
+  // std::fs::File::create("esxi_atom.log").unwrap();
+  // std::fs::write("esxi_atom.log", format!("{:#?}", feed))?;
+
   let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();
 
   if cached_patch.is_empty() {
@@ -172,7 +169,6 @@ async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
 async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
   let redis = get_redis().await;
   let rkey = "RSS_GPortal";
-  let rkey_content = format!("{}_Content", rkey);
   let url = "https://status.g-portal.com/history.atom";
 
   let res = fetch_feed(url).await?;
@@ -193,12 +189,14 @@ async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
     }
   }
 
+  // This is for building up the embed with the feed data
+  // std::fs::File::create("gportal.log").unwrap();
+  // std::fs::write("gportal.log", format!("{:#?}", feed))?;
+
   let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
-  let new_content = format_html_to_discord(article.content.unwrap().body.unwrap());
 
   if cached_incident.is_empty() {
-    redis.set(&rkey, &get_incident_id(&article.links[0].href).unwrap()).await.unwrap();
-    redis.set(&rkey_content, &new_content).await.unwrap();
+    redis.set(&rkey, get_incident_id(&article.links[0].href).unwrap().as_str()).await.unwrap();
     if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
       task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
     }
@@ -207,30 +205,17 @@ async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
 
   if let Some(incident) = get_incident_id(&article.links[0].href) {
     if incident == cached_incident {
-      let cached_content: String = redis.get(&format!("{}_content", rkey)).await.unwrap().unwrap_or_default();
-      if cached_content == new_content {
-        return Ok(None);
-      } else {
-        redis.set(&rkey_content, &new_content).await.unwrap();
-        redis.expire(&rkey_content, 21600).await.unwrap();
-        return Ok(Some(CreateEmbed::new()
-          .color(0xC23EE8)
-          .title(article.title.unwrap().content)
-          .url(incident_page)
-          .description(new_content)
-          .timestamp(Timestamp::from(article.updated.unwrap()))
-        ));
-      }
+      return Ok(None);
     } else {
       save_to_redis(&rkey, &incident).await?;
-      redis.set(&rkey_content, &new_content).await.unwrap();
-      return Ok(Some(CreateEmbed::new()
+      Ok(Some(CreateEmbed::new()
         .color(0xC23EE8)
         .title(article.title.unwrap().content)
         .url(incident_page)
-        .description(new_content)
-        .timestamp(Timestamp::from(article.updated.unwrap()))
-      ));
+        .description(format!("{}", format_html_to_discord(article.content.unwrap().body.unwrap())
+        ))
+        .timestamp(Timestamp::from(article.updated.unwrap())))
+      )
     }
   } else {
     task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
@@ -256,6 +241,10 @@ async fn rust_message() -> Result<Option<String>, Error> {
     re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
   }
 
+  // This is for building up the message with the feed data
+  // std::fs::File::create("rustblog.log").unwrap();
+  // std::fs::write("rustblog.log", format!("{:#?}", feed))?;
+
   let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();
 
   if cached_blog.is_empty() {
@@ -286,49 +275,43 @@ pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
 
   loop {
     interval.tick().await;
-    let mut log_msgs: Vec<String> = Vec::new();
 
     match esxi_embed().await {
       Ok(Some(embed)) => {
        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
       },
       Ok(None) => {
-        log_msgs.push("**[RSS:ESXi]:** Article returned no new content.".to_string());
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content("**[RSS:ESXi]:** Article returned no new content.")
+        ).await.unwrap();
       },
       Err(y) => {
-        log_msgs.push(format!("**[RSS:ESXi:Error]:** Feed failed with the following error:```\n{}\n```", y));
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content(format!("**[RSS:ESXi:Error]:** Feed failed with the following error:```\n{}\n```", y))
+        ).await.unwrap();
         task_err(&task_name, &y.to_string())
       }
     }
 
     match gportal_embed().await {
       Ok(Some(embed)) => {
-        let redis = get_redis().await;
-        let rkey = "RSS_GPortal_MsgID";
-        let channel = ChannelId::new(BINARY_PROPERTIES.rss_channel);
-
-        // Check if the message ID is in Redis
-        if let Ok(Some(msg_id_key)) = redis.get(&rkey).await {
-          if let Ok(msg_id) = msg_id_key.parse::<u64>() {
-            // Attempt to edit the message
-            if let Ok(mut message) = channel.message(&ctx.http, msg_id).await {
-              message.edit(&ctx.http, EditMessage::new().embed(embed)).await.unwrap();
-            }
-          } else {
-            // If the message is not found or invalid ID, send a new message instead
-            let message = channel.send_message(&ctx.http, CreateMessage::new()
-              .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
-            ).await.unwrap();
-            redis.set(&rkey, &message.id.to_string()).await.unwrap();
-            redis.expire(&rkey, 36000).await.unwrap();
-          }
-        }
+        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new()
+          .content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
+        ).await.unwrap();
       },
       Ok(None) => {
-        log_msgs.push("**[RSS:GPortal]:** Article returned no new content.".to_string());
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content("**[RSS:GPortal]:** Article returned no new content.")
+        ).await.unwrap();
       },
       Err(y) => {
-        log_msgs.push(format!("**[RSS:GPortal:Error]:** Feed failed with the following error:```\n{}\n```", y));
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content(format!("**[RSS:GPortal:Error]:** Feed failed with the following error:```\n{}\n```", y))
+        ).await.unwrap();
         task_err(&task_name, &y.to_string())
       }
     }
@@ -338,18 +321,18 @@ pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
        ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
       },
       Ok(None) => {
-        log_msgs.push("**[RSS:RustBlog]:** Article returned no new content.".to_string());
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content("**[RSS:RustBlog]:** Article returned no new content.")
+        ).await.unwrap();
       },
       Err(y) => {
-        log_msgs.push(format!("**[RSS:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```", y));
+        ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
+          &ctx.http, CreateMessage::new()
+            .content(format!("**[RSS:RustBlog:Error]:** Feed failed with the following error:```\n{}\n```", y))
+        ).await.unwrap();
         task_err(&task_name, &y.to_string())
       }
     }
-
-    if !log_msgs.is_empty() {
-      ChannelId::new(BINARY_PROPERTIES.kon_logs).send_message(
-        &ctx.http, CreateMessage::new().content(log_msgs.join("\n"))
-      ).await.unwrap();
-    }
   }
 }