Add RSS implementation

toast 2024-07-28 09:39:56 +10:00
parent de13b472bb
commit 3d10e4973f
12 changed files with 562 additions and 20 deletions

1
.gitignore vendored

@@ -1,2 +1,3 @@
target
.env
*.log

110
Cargo.lock generated

@@ -118,6 +118,17 @@ dependencies = [
"tokio-postgres",
]
[[package]]
name = "bb8-redis"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb4f141b33a750b5f667c445bd8588de10b8f2b045cd2aabc040ca746fb53ae"
dependencies = [
"async-trait",
"bb8",
"redis",
]
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -229,6 +240,20 @@ dependencies = [
"windows-targets 0.52.5",
]
[[package]]
name = "combine"
version = "4.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
dependencies = [
"bytes",
"futures-core",
"memchr",
"pin-project-lite",
"tokio",
"tokio-util",
]
[[package]]
name = "core-foundation"
version = "0.9.4"
@@ -458,6 +483,23 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a"
[[package]]
name = "feed-rs"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c546f6f85e02eaca09e6a6dd22fe4db839745f55ae16c8f36626980a57f5bc4"
dependencies = [
"chrono",
"mediatype",
"quick-xml",
"regex",
"serde",
"serde_json",
"siphasher 1.0.1",
"url",
"uuid",
]
[[package]]
name = "finl_unicode"
version = "1.2.0"
@@ -972,11 +1014,13 @@ dependencies = [
[[package]]
name = "kon"
version = "0.3.3"
version = "0.3.4"
dependencies = [
"bb8",
"bb8-postgres",
"bb8-redis",
"cargo_toml",
"feed-rs",
"once_cell",
"os_info",
"poise",
@@ -993,9 +1037,9 @@ dependencies = [
[[package]]
name = "lazy_static"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
@@ -1056,6 +1100,12 @@ dependencies = [
"digest",
]
[[package]]
name = "mediatype"
version = "0.19.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8878cd8d1b3c8c8ae4b2ba0a36652b7cf192f618a599a7fbdfa25cffd4ea72dd"
[[package]]
name = "memchr"
version = "2.7.2"
@@ -1270,7 +1320,7 @@ version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b"
dependencies = [
"siphasher",
"siphasher 0.3.11",
]
[[package]]
@@ -1407,6 +1457,16 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-xml"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
dependencies = [
"encoding_rs",
"memchr",
]
[[package]]
name = "quote"
version = "1.0.36"
@@ -1466,6 +1526,25 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redis"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0d7a6955c7511f60f3ba9e86c6d02b3c3f144f8c24b288d1f4e18074ab8bbec"
dependencies = [
"async-trait",
"bytes",
"combine",
"futures-util",
"itoa",
"percent-encoding",
"pin-project-lite",
"ryu",
"tokio",
"tokio-util",
"url",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
@@ -1932,6 +2011,12 @@ version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
[[package]]
name = "siphasher"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "skeptic"
version = "0.13.7"
@@ -2170,9 +2255,9 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.39.1"
version = "1.39.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d040ac2b29ab03b09d4129c2f5bbd012a3ac2f79d38ff506a4bf8dd34b0eac8a"
checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1"
dependencies = [
"backtrace",
"bytes",
@@ -2557,9 +2642,9 @@ dependencies = [
[[package]]
name = "url"
version = "2.5.0"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
dependencies = [
"form_urlencoded",
"idna 0.5.0",
@@ -2573,6 +2658,15 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "uuid"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
dependencies = [
"getrandom",
]
[[package]]
name = "vcpkg"
version = "0.2.15"

Cargo.toml

@@ -1,12 +1,14 @@
[package]
name = "kon"
version = "0.3.3"
version = "0.3.4"
edition = "2021"
[dependencies]
bb8 = "0.8.5"
bb8-postgres = "0.8.1"
bb8-redis = "0.15.0"
cargo_toml = "0.20.4"
feed-rs = "2.1.0"
once_cell = "1.19.0"
os_info = "3.8.2"
poise = "0.6.1"
@@ -16,7 +18,7 @@ serde = "1.0.204"
serde_json = "1.0.120"
sysinfo = "0.30.13"
tokenservice-client = { version = "0.3.2", registry = "gitea" }
tokio = { version = "1.39.1", features = ["macros", "signal", "rt-multi-thread"] }
tokio = { version = "1.39.2", features = ["macros", "signal", "rt-multi-thread"] }
tokio-postgres = "0.7.11"
uptime_lib = "0.3.1"
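
The new runtime dependencies are bb8-redis (pooled cache connections) and feed-rs (Atom/RSS parsing). For reference, a minimal sketch of the feed-rs flow this enables, assuming reqwest's convenience client and simplified error handling (the feed URL is one of those polled in the new RSS task):

use std::io::Cursor;
use feed_rs::parser::parse;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fetch the Rust blog feed and parse it; feed-rs accepts anything implementing Read.
    let body = reqwest::get("https://blog.rust-lang.org/feed.xml").await?.text().await?;
    let feed = parse(Cursor::new(body))?;
    if let Some(entry) = feed.entries.first() {
        println!("latest entry: {}", entry.id);
    }
    Ok(())
}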

Dockerfile

@@ -1,4 +1,4 @@
FROM rust:1.79-alpine3.20 AS chef
FROM rust:1.80-alpine3.20 AS chef
ENV RUSTFLAGS="-C target-feature=-crt-static"
ARG CARGO_TOKEN
RUN apk add --no-cache openssl-dev musl-dev

View File

@@ -1,11 +1,18 @@
services:
bot:
container_name: kon
#image: 'git.toast-server.net/toast/kon:main'
#image: git.toast-server.net/toast/kon:main
build: .
restart: unless-stopped
# depends_on:
# - db
depends_on:
- cache
cache:
container_name: kon-redis
image: redis/redis-stack-server:7.4.0-rc2
restart: unless-stopped
ports:
- 37935:6379/tcp
# db:
# container_name: kon-database
# image: postgres:16.2-alpine3.19@sha256:951bfda460300925caa3949eaa092ba022e9aec191bbea9056a39e2382260b27
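
The new cache service publishes Redis Stack on host port 37935, while containers on the compose network reach it by service name. A hedged connection sketch under those assumptions; both URIs here are illustrative, since the bot itself pulls redis_uri from tokenservice-client at runtime:

use bb8_redis::{bb8::Pool, RedisConnectionManager};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Inside the compose network the service name resolves directly;
    // from the host, the mapped port 37935 would be used instead.
    let uri = "redis://kon-redis:6379"; // or "redis://127.0.0.1:37935" from the host
    let manager = RedisConnectionManager::new(uri)?;
    let pool = Pool::builder().max_size(20).build(manager).await?;
    let mut conn = pool.get().await?;
    let pong: String = bb8_redis::redis::cmd("PING").query_async(&mut *conn).await?;
    println!("Redis replied: {pong}");
    Ok(())
}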

View File

@@ -1 +1,2 @@
pub mod cache;
// pub mod database;

91
src/controllers/cache.rs Normal file

@@ -0,0 +1,91 @@
use crate::internals::utils::token_path;
use poise::serenity_prelude::prelude::TypeMapKey;
use bb8_redis::{
bb8::Pool,
redis::cmd,
redis::RedisError,
redis::RedisResult,
redis::AsyncCommands,
RedisConnectionManager
};
use tokio::time::{
sleep,
Duration
};
#[derive(Debug)]
pub struct RedisController {
pool: Pool<RedisConnectionManager>
}
impl TypeMapKey for RedisController {
type Value = RedisController;
}
impl RedisController {
pub async fn new() -> Result<Self, RedisError> {
let manager = RedisConnectionManager::new(token_path().await.redis_uri.as_str())?;
let pool = Self::create_pool(manager).await;
Ok(Self { pool })
}
async fn create_pool(manager: RedisConnectionManager) -> Pool<RedisConnectionManager> {
let mut backoff = 1;
loop {
match Pool::builder().max_size(20).retry_connection(true).build(manager.clone()).await {
Ok(pool) => {
match pool.get().await {
Ok(mut conn) => {
let ping: RedisResult<String> = cmd("PING").query_async(&mut *conn).await;
match ping {
Ok(_) => {
println!("Redis[Info]: Successfully connected");
return pool.clone();
},
Err(e) => {
eprintln!("Redis[Error]: {}, retrying in {} seconds", e, backoff);
Self::apply_backoff(&mut backoff).await;
}
}
},
Err(e) => {
eprintln!("Redis[ConnError]: {}, retrying in {} seconds", e, backoff);
Self::apply_backoff(&mut backoff).await;
}
}
}
Err(e) => {
eprintln!("Redis[PoolError]: {}, retrying in {} seconds", e, backoff);
Self::apply_backoff(&mut backoff).await;
}
}
}
}
async fn apply_backoff(backoff: &mut u64) {
sleep(Duration::from_secs(*backoff)).await;
if *backoff < 64 {
*backoff *= 2;
}
}
/// Get a key from the cache
pub async fn get(&self, key: &str) -> RedisResult<Option<String>> {
let mut conn = self.pool.get().await.unwrap();
conn.get(key).await
}
/// Set a key with a value in the cache
pub async fn set(&self, key: &str, value: &str) -> RedisResult<()> {
let mut conn = self.pool.get().await.unwrap();
conn.set(key, value).await
}
/// Set a key with an expiration time in seconds
pub async fn expire(&self, key: &str, seconds: i64) -> RedisResult<()> {
let mut conn = self.pool.get().await.unwrap();
conn.expire(key, seconds).await
}
}
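
For reviewers, a hypothetical call site for this controller, using only the get/set/expire methods defined above (module path assumed; error handling kept minimal):

use crate::controllers::cache::RedisController;
use bb8_redis::redis::RedisError;

async fn cache_demo() -> Result<(), RedisError> {
    let redis = RedisController::new().await?;
    redis.set("RSS_Example", "cached-value").await?;
    redis.expire("RSS_Example", 7200).await?; // drop the key after two hours
    if let Some(v) = redis.get("RSS_Example").await? {
        println!("cache hit: {v}");
    }
    Ok(())
}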

View File

@@ -1,4 +1,5 @@
pub mod config;
pub mod http;
pub mod tasks;
pub mod tsclient;
pub mod utils;

src/internals/config.rs

@@ -1,22 +1,24 @@
use once_cell::sync::Lazy;
use std::sync::LazyLock;
pub struct ConfigMeta {
pub guild_id: u64,
pub embed_color: i32,
pub ready_notify: u64,
pub rss_channel: u64,
pub deploy_commands: bool,
pub developers: Vec<u64>
}
#[cfg(feature = "production")]
pub static BINARY_PROPERTIES: Lazy<ConfigMeta> = Lazy::new(|| ConfigMeta::new());
pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(|| ConfigMeta::new());
#[cfg(not(feature = "production"))]
pub static BINARY_PROPERTIES: Lazy<ConfigMeta> = Lazy::new(||
pub static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(||
ConfigMeta::new()
.guild_id(865673694184996885)
.embed_color(0xf1d63c)
.ready_notify(865673694184996888)
.rss_channel(865673694184996888)
.deploy_commands(false)
);
@@ -26,6 +28,7 @@ impl ConfigMeta {
guild_id: 865673694184996885,
embed_color: 0x5a99c7,
ready_notify: 865673694184996888,
rss_channel: 865673694184996888,
deploy_commands: false,
developers: vec![
190407856527376384 // toast.ts
@@ -52,6 +55,12 @@ impl ConfigMeta {
self
}
#[cfg(not(feature = "production"))]
fn rss_channel(mut self, channel_id: u64) -> Self {
self.rss_channel = channel_id;
self
}
#[cfg(not(feature = "production"))]
fn deploy_commands(mut self, deploy: bool) -> Self {
self.deploy_commands = deploy;
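
Besides the new rss_channel field, this file swaps once_cell::sync::Lazy for std::sync::LazyLock, which is stable as of Rust 1.80 and lines up with the Dockerfile bump above. A minimal standalone sketch of the pattern:

use std::sync::LazyLock;

struct ConfigMeta {
    embed_color: i32
}

// The closure runs once, on first access, with no extra crate needed.
static BINARY_PROPERTIES: LazyLock<ConfigMeta> = LazyLock::new(|| ConfigMeta {
    embed_color: 0x5a99c7
});

fn main() {
    println!("{:#x}", BINARY_PROPERTIES.embed_color);
}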

9
src/internals/tasks.rs Normal file

@@ -0,0 +1,9 @@
pub mod rss;
fn task_info(name: &str, message: &str) {
println!("TaskScheduler[{}]: {}", name, message);
}
fn task_err(name: &str, message: &str) {
eprintln!("TaskScheduler[{}:Error]: {}", name, message);
}

305
src/internals/tasks/rss.rs Normal file

@@ -0,0 +1,305 @@
use crate::{
Error,
controllers::cache::RedisController
};
use super::{
super::{
http::HttpClient,
config::BINARY_PROPERTIES
},
task_info,
task_err
};
use once_cell::sync::OnceCell;
use feed_rs::parser::parse;
use reqwest::Response;
use regex::Regex;
use std::{
sync::Arc,
io::Cursor
};
use poise::serenity_prelude::{
Context,
ChannelId,
CreateMessage,
CreateEmbed,
CreateEmbedAuthor,
Timestamp
};
use tokio::time::{
Duration,
interval
};
static REDIS_EXPIRY_SECS: i64 = 7200;
static REDIS_SERVICE: OnceCell<Arc<RedisController>> = OnceCell::new();
async fn redis_() {
let redis = RedisController::new().await.unwrap();
REDIS_SERVICE.set(Arc::new(redis)).unwrap();
}
async fn get_redis() -> Arc<RedisController> {
if REDIS_SERVICE.get().is_none() {
redis_().await;
}
REDIS_SERVICE.get().unwrap().clone()
}
fn format_href_to_discord(input: &str) -> String {
let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
re.replace_all(input, r"[$2]($1)").to_string()
}
fn format_html_to_discord(input: String) -> String {
let mut output = input;
// Replace all instances of <p> with newlines
output = Regex::new(r#"<\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
output = Regex::new(r#"<\s*/\s*p\s*>"#).unwrap().replace_all(&output, "\n").to_string();
// Replace all instances of <br> and <br /> with newlines
output = Regex::new(r#"<\s*br\s*>"#).unwrap().replace_all(&output, "\n").to_string();
output = Regex::new(r#"<\s*br\s*/\s*>"#).unwrap().replace_all(&output, "\n").to_string();
// Replace all instances of <strong> with **
output = Regex::new(r#"<\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
output = Regex::new(r#"<\s*/\s*strong\s*>"#).unwrap().replace_all(&output, "**").to_string();
// Replace all instances of <var> and <small> with nothing
output = Regex::new(r#"<\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
output = Regex::new(r#"<\s*/\s*var\s*>"#).unwrap().replace_all(&output, "").to_string();
output = Regex::new(r#"<\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();
output = Regex::new(r#"<\s*/\s*small\s*>"#).unwrap().replace_all(&output, "").to_string();
// Remove any other HTML tags
output = Regex::new(r#"<[^>]+>"#).unwrap().replace_all(&output, "").to_string();
// Replace all instances of <a href="url">text</a> with [text](url)
output = format_href_to_discord(&output);
output
}
async fn fetch_feed(url: &str) -> Result<Response, Error> {
let http = HttpClient::new();
let res = match http.get(url, "RSS-Monitor").await {
Ok(res) => res,
Err(y) => return Err(y.into())
};
Ok(res)
}
async fn save_to_redis(key: &str, value: &str) -> Result<(), Error> {
let redis = get_redis().await;
redis.set(key, value).await.unwrap();
if let Err(y) = redis.expire(key, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
}
Ok(())
}
async fn esxi_embed() -> Result<Option<CreateEmbed>, Error> {
let redis = get_redis().await;
let rkey = "RSS_ESXi";
let url = "https://esxi-patches.v-front.de/atom/ESXi-7.0.0.xml";
let res = fetch_feed(url).await?;
let data = res.text().await?;
let cursor = Cursor::new(data);
let feed = parse(cursor).unwrap();
let home_page = feed.links[0].clone().href;
let article = feed.entries[0].clone();
fn get_patch_version(input: &str) -> Option<String> {
let re = Regex::new(r#"(?i)Update\s+([0-9]+)([a-z]?)"#).unwrap();
if let Some(caps) = re.captures(input) {
let update_num = caps[1].to_string();
let letter = caps.get(2).map_or("", |m| m.as_str());
Some(format!("Update {}{}", update_num, letter))
} else {
None
}
}
// This is for building up the embed with the feed data
// std::fs::File::create("esxi_atom.log").unwrap();
// std::fs::write("esxi_atom.log", format!("{:#?}", feed))?;
let cached_patch = redis.get(&rkey).await.unwrap().unwrap_or_default();
if cached_patch.is_empty() {
redis.set(&rkey, &article.categories[3].term).await.unwrap();
if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
}
return Ok(None);
}
if let Some(patch) = get_patch_version(&article.categories[3].term) {
if patch == cached_patch {
return Ok(None);
} else {
save_to_redis(&rkey, &article.categories[3].term).await?;
Ok(Some(CreateEmbed::new()
.color(0x4EFBCB)
.author(CreateEmbedAuthor::new(feed.title.unwrap().content).url(home_page))
.thumbnail(feed.logo.unwrap().uri)
.description(format!(
"{} {} for {} {} has been rolled out!\n{}",
article.categories[2].term,
article.categories[3].term,
article.categories[0].term,
article.categories[1].term,
format_href_to_discord(article.summary.unwrap().content.as_str())
))
.timestamp(Timestamp::from(article.updated.unwrap())))
)
}
} else {
task_err("RSS:ESXi", &format!("Article term does not match the expected RegEx pattern! ({})", article.categories[3].term.as_str()));
Ok(None)
}
}
async fn gportal_embed() -> Result<Option<CreateEmbed>, Error> {
let redis = get_redis().await;
let rkey = "RSS_GPortal";
let url = "https://status.g-portal.com/history.atom";
let res = fetch_feed(url).await?;
let data = res.text().await?;
let cursor = Cursor::new(data);
let feed = parse(cursor).unwrap();
let incident_page = feed.links[0].clone().href;
let article = feed.entries[0].clone();
fn get_incident_id(input: &str) -> Option<String> {
let re = Regex::new(r#"/incidents/([a-zA-Z0-9]+)$"#).unwrap();
if let Some(caps) = re.captures(input) {
Some(caps[1].to_string())
} else {
None
}
}
// This is for building up the embed with the feed data
// std::fs::File::create("gportal.log").unwrap();
// std::fs::write("gportal.log", format!("{:#?}", feed))?;
let cached_incident = redis.get(&rkey).await.unwrap().unwrap_or_default();
if cached_incident.is_empty() {
redis.set(&rkey, get_incident_id(&article.links[0].href).unwrap().as_str()).await.unwrap();
if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
}
return Ok(None);
}
if let Some(incident) = get_incident_id(&article.links[0].href) {
if incident == cached_incident {
return Ok(None);
} else {
save_to_redis(&rkey, &incident).await?;
Ok(Some(CreateEmbed::new()
.color(0xC23EE8)
.title(article.title.unwrap().content)
.url(incident_page)
.description(format_html_to_discord(article.content.unwrap().body.unwrap()))
.timestamp(Timestamp::from(article.updated.unwrap())))
)
}
} else {
task_err("RSS:GPortal", &format!("Incident ID does not match the expected RegEx pattern! ({})", &article.links[0].href));
Ok(None)
}
}
async fn rust_message() -> Result<Option<String>, Error> {
let redis = get_redis().await;
let rkey = "RSS_RustBlog";
let url = "https://blog.rust-lang.org/feed.xml";
let res = fetch_feed(url).await?;
let data = res.text().await?;
let cursor = Cursor::new(data);
let feed = parse(cursor).unwrap();
let article = feed.entries[0].clone();
let article_id = article.id.clone();
fn get_blog_title(input: String) -> Option<String> {
let re = Regex::new(r"https://blog\.rust-lang\.org/(\d{4}/\d{2}/\d{2}/[^/]+)").unwrap();
re.captures(input.as_str()).and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
}
// This is for building up the message with the feed data
// std::fs::File::create("rustblog.log").unwrap();
// std::fs::write("rustblog.log", format!("{:#?}", feed))?;
let cached_blog = redis.get(&rkey).await.unwrap().unwrap_or_default();
if cached_blog.is_empty() {
redis.set(&rkey, get_blog_title(article.id).unwrap().as_str()).await.unwrap();
if let Err(y) = redis.expire(&rkey, REDIS_EXPIRY_SECS).await {
task_err("RSS", format!("[RedisExpiry]: {}", y).as_str());
}
return Ok(None);
}
if let Some(blog) = get_blog_title(article.id) {
if blog == cached_blog {
return Ok(None);
} else {
save_to_redis(&rkey, &blog).await?;
Ok(Some(format!("Rust Team has put out a new article!\n[{}](<{}>)", article.links[0].title.clone().unwrap(), article.links[0].href)))
}
} else {
task_err("RSS:RustBlog", &format!("Article URL does not match the expected RegEx pattern! ({})", article_id));
Ok(None)
}
}
pub async fn rss(ctx: Arc<Context>) -> Result<(), Error> {
let task_name = "RSS";
let mut interval = interval(Duration::from_secs(900));
task_info(&task_name, "Task loaded!");
loop {
interval.tick().await;
match esxi_embed().await {
Ok(Some(embed)) => {
ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().add_embed(embed)).await.unwrap();
},
Ok(None) => (),
Err(y) => task_err(&task_name, &y.to_string())
}
match gportal_embed().await {
Ok(Some(embed)) => {
ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new()
.content("*Uh-oh! G-Portal is having issues!*").add_embed(embed)
).await.unwrap();
},
Ok(None) => (),
Err(y) => task_err(&task_name, &y.to_string())
}
match rust_message().await {
Ok(Some(content)) => {
ChannelId::new(BINARY_PROPERTIES.rss_channel).send_message(&ctx.http, CreateMessage::new().content(content)).await.unwrap();
},
Ok(None) => (),
Err(y) => task_err(&task_name, &y.to_string())
}
}
}
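
As an aid when reviewing the regex-based HTML cleanup at the top of this file, here is a self-contained example of the anchor-tag rewrite with a made-up input string:

use regex::Regex;

fn main() {
    // Same pattern and replacement as format_href_to_discord() above.
    let re = Regex::new(r#"<a href="([^"]+)">([^<]+)</a>"#).unwrap();
    let input = r#"See <a href="https://example.com/notes">the release notes</a> for details."#;
    let out = re.replace_all(input, r"[$2]($1)");
    println!("{out}"); // See [the release notes](https://example.com/notes) for details.
}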

src/main.rs

@@ -15,7 +15,10 @@ use crate::{
// controllers::database::DatabaseController
};
use std::error;
use std::{
thread::current,
sync::Arc
};
use poise::serenity_prelude::{
builder::{
CreateMessage,
@@ -31,7 +34,7 @@ use poise::serenity_prelude::{
GatewayIntents
};
type Error = Box<dyn error::Error + Send + Sync>;
type Error = Box<dyn std::error::Error + Send + Sync>;
async fn on_ready(
ctx: &Context,
@@ -77,7 +80,7 @@ async fn on_ready(
}
async fn event_processor(
_ctx: &Context,
ctx: &Context,
event: &FullEvent,
_framework: poise::FrameworkContext<'_, (), Error>
) -> Result<(), Error> {
@@ -85,6 +88,25 @@ async fn event_processor(
FullEvent::Ratelimit { data } => {
println!("Event[Ratelimit]: {:#?}", data);
}
FullEvent::Ready { .. } => {
let thread_id = format!("{:?}", current().id());
let thread_num: String = thread_id.chars().filter(|c| c.is_digit(10)).collect();
println!("Event[Ready]: Task Scheduler operating on thread {}", thread_num);
let ctx = Arc::new(ctx.clone());
tokio::spawn(async move {
match internals::tasks::rss::rss(ctx).await {
Ok(_) => {},
Err(y) => {
eprintln!("TaskScheduler[Main:RSS:Error]: Task execution failed: {}", y);
if let Some(source) = y.source() {
eprintln!("TaskScheduler[Main:RSS:Error]: Task execution failed caused by: {:#?}", source);
}
}
}
});
}
_ => {}
}
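
The Ready handler above hands an Arc-wrapped context to a spawned task that polls on a 900-second interval. Reduced to a standalone sketch with placeholder names and a placeholder shared value, the wiring looks roughly like this:

use std::sync::Arc;
use tokio::time::{interval, Duration};

async fn rss_task(shared: Arc<String>) {
    let mut timer = interval(Duration::from_secs(900));
    loop {
        timer.tick().await; // first tick completes immediately, then every 15 minutes
        println!("polling feeds with {shared}");
    }
}

#[tokio::main]
async fn main() {
    let ctx = Arc::new(String::from("placeholder for serenity's Context"));
    tokio::spawn(rss_task(ctx.clone()));
    // The real bot keeps serving gateway events here; sleep stands in for that.
    tokio::time::sleep(Duration::from_secs(3)).await;
}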