Compare commits


No commits in common. "develop" and "1.0.0" have entirely different histories.

25 changed files with 533 additions and 1155 deletions


@@ -1,32 +0,0 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/go/build-context-dockerignore/
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/secrets.dev.yaml
**/values.dev.yaml
/bin
/target
LICENSE
README.md


@@ -1 +1,2 @@
 DISCORD_TOKEN=changeme
+DATABASE_URL=sqlite:p4bl0t.db

.gitignore (vendored, 1 line changed)

@@ -1,3 +1,4 @@
 /target
 /.env
 *.db
+/.sqlx/


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\nINSERT INTO guild_log_channels (guild_id, channel_id)\nVALUES ( ?1, ?2 )",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03"
}


@@ -1,20 +0,0 @@
{
"db_name": "SQLite",
"query": "\nSELECT channel_id\nFROM guild_log_channels\nWHERE guild_id = ?1",
"describe": {
"columns": [
{
"name": "channel_id",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\nDELETE FROM guild_log_channels\nWHERE guild_id = ?1 AND channel_id = ?2",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e"
}
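
The three JSON documents above are sqlx's offline query metadata: one file per `sqlx::query!` invocation, keyed by the query hash, which lets the macros type-check without a live database (this is what allows the Docker build further down to work with only a bind mount of `.sqlx`). A hedged sketch of how such files are typically regenerated with sqlx-cli; the `DATABASE_URL` value is illustrative:

```sh
# Regenerate .sqlx/query-*.json after changing any sqlx::query! call.
# Assumes sqlx-cli is installed and the database has been migrated.
export DATABASE_URL=sqlite:p4bl0t.db
cargo sqlx prepare
```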


@@ -1,34 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
## [unreleased]
### Documentation
- Add Changelog
## [1.0.1] - 2024-01-18
### Features
- Dockerize p4bl0t
### Miscellaneous Tasks
- Update bot framework
### Refactor
- Simplify code, better organize it, and comment it
## [1.0.0] - 2023-11-23
### Features
- Add a channel as a logging channel
- Add listing logger channels in a guild
- Unset a channel as a logger
- Send in logger channels mentions to everyone
<!-- generated by git-cliff -->

Cargo.lock (generated, 974 lines changed)

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
 [package]
 name = "p4bl0t"
-version = "1.0.1"
+version = "1.0.0"
 edition = "2021"
 authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
 license-file = "LICENSE.md"
@@ -10,12 +10,12 @@ homepage = "https://github.com/phundrak/p4bl0t"
 repository = "https://github.com/phundrak/p4bl0t"
 keywords = ["discord", "bot", "logging"]
 publish = false
-build = "build.rs"
 [dependencies]
 color-eyre = "0.6.2"
-poise = { version = "0.6.1" }
-sqlx = { version = "0.7.3", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
+dotenvy = "0.15.7"
+poise = { version = "0.5.7" }
+sqlx = { version = "0.7.2", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
 tokio = { version = "1.34.0", features = ["macros", "rt-multi-thread"] }
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"


@@ -1,93 +0,0 @@
# syntax=docker/dockerfile:1
# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/
ARG RUST_VERSION=1.73.0
ARG APP_NAME=p4bl0t
################################################################################
# xx is a helper for cross-compilation.
# See https://github.com/tonistiigi/xx/ for more information.
FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.3.0 AS xx
################################################################################
# Create a stage for building the application.
FROM --platform=$BUILDPLATFORM rust:${RUST_VERSION}-alpine AS build
ARG APP_NAME
WORKDIR /app
# Copy cross compilation utilities from the xx stage.
COPY --from=xx / /
# Install host build dependencies.
RUN apk add --no-cache clang lld musl-dev git file
# This is the architecture you're building for, which is passed in by the builder.
# Placing it here allows the previous steps to be cached across architectures.
ARG TARGETPLATFORM
# Install cross compilation build dependencies.
RUN xx-apk add --no-cache musl-dev gcc
# Build the application.
# Leverage a cache mount to /usr/local/cargo/registry/
# for downloaded dependencies, a cache mount to /usr/local/cargo/git/db
# for git repository dependencies, and a cache mount to /app/target/ for
# compiled dependencies which will speed up subsequent builds.
# Leverage a bind mount to the src directory to avoid having to copy the
# source code into the container. Once built, copy the executable to an
# output directory before the cache mounted /app/target is unmounted.
RUN --mount=type=bind,source=src,target=src \
--mount=type=bind,source=Cargo.toml,target=Cargo.toml \
--mount=type=bind,source=Cargo.lock,target=Cargo.lock \
--mount=type=bind,source=build.rs,target=build.rs \
--mount=type=bind,source=.sqlx,target=.sqlx \
--mount=type=bind,source=migrations,target=migrations \
--mount=type=cache,target=/app/target/,id=rust-cache-${APP_NAME}-${TARGETPLATFORM} \
--mount=type=cache,target=/usr/local/cargo/git/db \
--mount=type=cache,target=/usr/local/cargo/registry/ \
<<EOF
set -e
# xx-cargo build --locked --release --target-dir ./target
xx-cargo build --locked --target-dir ./target
cp ./target/$(xx-cargo --print-target-triple)/debug/$APP_NAME /bin/server
xx-verify /bin/server
EOF
################################################################################
# Create a new stage for running the application that contains the minimal
# runtime dependencies for the application. This often uses a different base
# image from the build stage where the necessary files are copied from the build
# stage.
#
# The example below uses the alpine image as the foundation for running the app.
# By specifying the "3.18" tag, it will use version 3.18 of alpine. If
# reproducability is important, consider using a digest
# (e.g., alpine@sha256:664888ac9cfd28068e062c991ebcff4b4c7307dc8dd4df9e728bedde5c449d91).
FROM alpine:3.18 AS final
# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/sbin/nologin" \
--no-create-home \
--uid "${UID}" \
appuser
WORKDIR /app
RUN chown -R appuser /app
USER appuser
# Copy the executable from the "build" stage.
COPY --from=build /bin/server /bin/
# Expose the port that the application listens on.
# EXPOSE 8080
# What the container should run when it is started.
CMD ["/bin/server"]
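
The Dockerfile above relies on BuildKit features (cache mounts, bind mounts, and the xx cross-compilation helpers). As a rough, hedged example of how such an image might be built and run locally; the `p4bl0t` image tag and the token value are placeholders, not taken from the repository:

```sh
# Build with BuildKit; --platform is only needed when cross-building.
docker buildx build --platform linux/amd64 -t p4bl0t .
# Run the final stage; DISCORD_TOKEN matches what the compose file below provides.
docker run --rm -e DISCORD_TOKEN=changeme p4bl0t
```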


@@ -3,24 +3,11 @@
 p4bl0t is a simple logging bot for Discord written in Rust.
 ## Usage
-### Preparation
-In order to run p4bl0t, you will need a Discord token with which your
-bot will authenticate. Head over to your [developer
+In order to run p4bl0t, head over to your [developer
 portal](https://discord.com/developers) on Discord's website, and
-create a bot there. You will be able to get the bot's token there.
+create a bot there. Then, copy the `.env.example` file to a `.env`
+file and fill in the details.
-### Docker
-The easiest way to run p4bl0t is using Docker. Copy
-`docker-compose.example.yml` to `docker-compose.yml` and modify the
-`DISCORD_TOKEN` variable.
-Then, you can simply run
-```sh
-docker compose up # or docker-compose on some machines
-```
-### Building and running it yourself
-Copy the `.env.example` file to a `.env` file and fill in the details.
 ```sh
 cp .env.example .env
 emacs .env
@@ -40,6 +27,7 @@ cargo install sqlx-cli
 Setup your SQLite database.
 ```sh
+export DATABASE_URL=<your-database-url> # should be the same as in the .env file
 sqlx database create
 sqlx migrate run
 ```


@@ -1,5 +0,0 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}


@@ -1,84 +0,0 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# postprocessors
postprocessors = [
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
# { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features" },
{ message = "^fix", group = "Bug Fixes" },
{ message = "^doc", group = "Documentation" },
{ message = "^perf", group = "Performance" },
{ message = "^refactor", group = "Refactor" },
{ message = "^style", group = "Styling" },
{ message = "^test", group = "Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore: bump (version )?to", skip = true },
{ message = "^chore\\(deps\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore|ci", group = "Miscellaneous Tasks" },
{ body = ".*security", group = "Security" },
{ message = "^revert", group = "Revert" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# regex for matching git tags
tag_pattern = "[0-9].*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42
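
The cliff.toml above is what produced the CHANGELOG.md removed earlier in this diff ("generated by git-cliff"). A hedged example of the usual invocation, assuming the git-cliff binary is installed:

```sh
# Install once, then regenerate the changelog from the commit history.
cargo install git-cliff
git cliff --output CHANGELOG.md
```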


@@ -1,9 +0,0 @@
services:
  p4bl0t:
    build:
      context: .
      target: final
    environment:
      DISCORD_TOKEN: changeme
    volumes:
      - ./p4bl0t.db:/app/p4bl0t.db


@@ -1,92 +1,66 @@
 #![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
+use std::env;
 use poise::serenity_prelude::{ChannelId, GuildId};
-use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
-use tracing::{error, info, debug};
+use sqlx::SqlitePool;
+use tracing::error;
 pub type Result<T> = ::std::result::Result<T, sqlx::Error>;
-pub struct Database(SqlitePool);
+pub struct Database {
+    pool: SqlitePool,
+}
 impl Database {
-    /// Initialize Sqlite database.
-    ///
-    /// The Sqlite database should already exist and have its
-    /// migrations already executed.
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if the Sqlite pool fails to
-    /// create.
     pub async fn new() -> Result<Self> {
-        let url = "sqlite:p4bl0t.db";
-        if !Sqlite::database_exists(url).await? {
-            info!("Creating database");
-            Sqlite::create_database(url).await?;
-            info!("Database created");
-        }
-        debug!("Getting pool connection");
-        let pool = SqlitePool::connect(url).await?;
-        info!("Running migrations");
-        sqlx::migrate!().run(&pool).await?;
-        debug!("Database initialized");
-        Ok(Self(pool))
+        Ok(Self {
+            pool: SqlitePool::connect(
+                &env::var("DATABASE_URL")
+                    .expect("Missing enviroment variable DATABASE_URL"),
+            )
+            .await?,
+        })
     }
-    /// Return from database all channels registered as loggers for a
-    /// guild.
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if `sqlx` does so.
     pub async fn get_logging_channels(
         &self,
         guild_id: GuildId,
-    ) -> Result<Vec<ChannelId>> {
-        let guild_id = guild_id.get() as i64;
-        sqlx::query!(
+    ) -> Result<Vec<u64>> {
+        let guild_id = guild_id.0 as i64;
+        let channels = sqlx::query!(
             r#"
 SELECT channel_id
 FROM guild_log_channels
-WHERE guild_id = ?1"#,
+WHERE guild_id = ?1
+"#,
            guild_id
        )
-        .fetch_all(&self.0)
+        .fetch_all(&self.pool)
        .await
        .map_err(|e| {
            error!(
                "Error getting logging channels for guild {guild_id}: {e:?}"
            );
            e
-        })
-        .map(|channels| {
-            channels
-                .iter()
-                .map(|id| ChannelId::new(id.channel_id as u64))
-                .collect()
-        })
+        })?;
+        Ok(channels.iter().map(|id| id.channel_id as u64).collect())
    }
-    /// Adds a channel as a logger for a guild.
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if `sqlx` does so. This may
-    /// be either a database issue, or a channel is already registered
-    /// as a guild's logger, therefore violating the unicity
-    /// constraint for guild ID and channel ID pairs.
     pub async fn set_logging_channel(
        &self,
        guild_id: GuildId,
        channel_id: ChannelId,
    ) -> Result<()> {
-        let guild_id = guild_id.get() as i64;
-        let channel_id = channel_id.get() as i64;
-        let mut conn = self.0.acquire().await?;
-        sqlx::query!(r#"
+        let guild_id = guild_id.0 as i64;
+        let channel_id = channel_id.0 as i64;
+        let mut conn = self.pool.acquire().await?;
+        sqlx::query!(
+            r#"
 INSERT INTO guild_log_channels (guild_id, channel_id)
-VALUES ( ?1, ?2 )"#,
+VALUES ( ?1, ?2 )
+"#,
            guild_id,
            channel_id
        )
@@ -99,25 +73,18 @@ VALUES ( ?1, ?2 )"#,
        .map(|_| ())
    }
-    /// Unregister a channel as a logger for a guild.
-    ///
-    /// This function will return a success value even if `channel`
-    /// was not a logger of `guild` already.
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if `sqlx` does so.
     pub async fn remove_logging_channel(
        &self,
-        guild: GuildId,
-        channel: ChannelId,
+        guild_id: GuildId,
+        channel_id: ChannelId,
    ) -> Result<()> {
-        let guild_id = guild.get() as i64;
-        let channel_id = channel.get() as i64;
-        let mut conn = self.0.acquire().await?;
+        let guild_id = guild_id.0 as i64;
+        let channel_id = channel_id.0 as i64;
+        let mut conn = self.pool.acquire().await?;
        sqlx::query!(r#"
 DELETE FROM guild_log_channels
-WHERE guild_id = ?1 AND channel_id = ?2"#,
+WHERE guild_id = ?1 AND channel_id = ?2
+"#,
        guild_id,
        channel_id)
        .execute(&mut *conn)


@@ -1,16 +1,7 @@
-use super::super::{Context, Result};
-use poise::serenity_prelude as serenity;
+use super::{Context, Result};
+use super::utils::serenity;
-/// Main command for logging subcommands.
-///
-/// This command cannot be called on its own and will do nothing by
-/// itself.
-///
-/// # Errors
-///
-/// This command will never error out, even if its signature says it
-/// can.
 #[allow(clippy::unused_async)]
 #[poise::command(
     slash_command,
@@ -21,13 +12,8 @@ pub async fn logging(_ctx: Context<'_>) -> Result {
     Ok(())
 }
-/// Add a channel as a logger.
-///
-/// # Errors
-///
-/// This function will return an error if .
 #[poise::command(slash_command)]
-async fn add_channel(
+pub async fn add_channel(
     ctx: Context<'_>,
     #[description = "New logging channel"] channel: serenity::Channel,
 ) -> Result {
@@ -64,16 +50,8 @@ async fn add_channel(
     Ok(())
 }
-/// List all channels registered as loggers for a guild.
-///
-/// This will list all channels that are logger channels in the server
-/// from which the command was executed.
-///
-/// # Errors
-///
-/// This function will return an error if the database returns one.
 #[poise::command(slash_command)]
-async fn list_channels(ctx: Context<'_>) -> Result {
+pub async fn list_channels(ctx: Context<'_>) -> Result {
     let response = match ctx.guild_id() {
         None => "Error: Could not determine the guild's ID".to_owned(),
         Some(guild_id) => {
@@ -100,18 +78,8 @@ async fn list_channels(ctx: Context<'_>) -> Result {
     Ok(())
 }
-/// Remove a channel as a logger in a guild.
-///
-/// This will remove a channel from the list of logger channels in the
-/// guild from which the command was executed. If the channel is not a
-/// logger, the bot will still consider unsetting the channel as a
-/// logger a success.
-///
-/// # Errors
-///
-/// This function will return an error if the database errors.
 #[poise::command(slash_command)]
-async fn remove_channel(
+pub async fn remove_channel(
     ctx: Context<'_>,
     #[description = "Logger channel to remove"] channel: serenity::Channel,
 ) -> Result {


@@ -1,3 +0,0 @@
mod logging;
pub(crate) use logging::logging;


@@ -1,24 +0,0 @@
use std::error::Error as StdError;
use std::fmt::{self, Display};
#[derive(Debug, Clone, Copy)]
pub enum Error {
    GuildIdNotFound,
}
impl Error {
    pub fn boxed(self) -> Box<Self> {
        Box::new(self)
    }
}
impl Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // write!(f, "")
        match self {
            Self::GuildIdNotFound => write!(f, "Guild ID not found!"),
        }
    }
}
impl StdError for Error {}

src/discord/events.rs (new file, 72 lines)

@@ -0,0 +1,72 @@
use crate::db::Database;
use super::{utils::BotData, Error, Result};
use poise::{serenity_prelude as serenity, Event};
use tracing::{error, info};
async fn handle_everyone_mention(
    ctx: &serenity::Context,
    database: &Database,
    message: &serenity::Message,
) -> Result {
    use serenity::ChannelId;
    if let Some(guild_id) = message.guild_id {
        if message.mention_everyone {
            let author = message.author.clone();
            let message_channel = message.channel_id;
            let channels: Vec<ChannelId> = database
                .get_logging_channels(guild_id)
                .await?
                .iter()
                .map(|channel_id| serenity::ChannelId(channel_id.to_owned()))
                .collect();
            for channel in &channels {
                channel
                    .send_message(&ctx, |m| {
                        m.embed(|e| {
                            e.title("Someone mentioned everyone!")
                                .field("Author", author.clone(), true)
                                .field(
                                    "When",
                                    message.timestamp.naive_local().to_string(),
                                    true,
                                )
                                .field(
                                    "Channel",
                                    format!("<#{message_channel}>"),
                                    true,
                                )
                                .field("Link", format!("https://discord.com/channels/{guild_id}/{}/{}", channel.0, message.id), false)
                        })
                    })
                    .await
                    .map_err(|e| {
                        error!("Failed to send message: {e:?}");
                        e
                    })?;
            }
        }
    } else {
        error!("Could not determine guild id of message {message:?}");
    }
    Ok(())
}
pub async fn event_handler(
    ctx: &serenity::Context,
    event: &Event<'_>,
    _framework: poise::FrameworkContext<'_, BotData, Error>,
    data: &BotData,
) -> Result {
    match event {
        Event::Ready { data_about_bot } => {
            info!("Logged in as {}", data_about_bot.user.name);
        }
        Event::Message { new_message } => {
            handle_everyone_mention(ctx, &data.database, new_message).await?;
        }
        _ => {}
    }
    Ok(())
}


@@ -1,64 +0,0 @@
use crate::db::Database;
use super::super::Result;
use super::super::error::Error as DiscordError;
use poise::serenity_prelude::{self as serenity, CreateEmbed, CreateMessage};
use tracing::{error, info};
fn create_embed_for_mention(
    message: &serenity::Message,
    guild_id: u64,
) -> CreateEmbed {
    let author_id = message.author.id.to_string();
    let message_channel = message.channel_id.get();
    CreateEmbed::new()
        .title("Someone mentioned everyone!")
        .field("Author", format!("<@{author_id}>"), true)
        .field("When", message.timestamp.naive_local().to_string(), true)
        .field("Channel", format!("<#{message_channel}>"), true)
        .field(
            "Link",
            format!(
                "https://discord.com/channels/{guild_id}/{message_channel}/{}",
                message.id
            ),
            false,
        )
}
/// Handle messages mentioning everyone.
///
/// # Errors
///
/// This function will return an error if a message fails to be sent,
/// if retrieving the list of channels registered as loggers fails, or
/// if there is not guild ID that can be retrieved from the message.
pub async fn handle_everyone_mention(
    ctx: &serenity::Context,
    database: &Database,
    message: &serenity::Message,
) -> Result {
    info!("Message mentioning everyone: {message:?}");
    if !message.mention_everyone {
        return Ok(());
    }
    if message.guild_id.is_none() {
        error!("Message without a guild_id! {message:?}");
        return Err(DiscordError::GuildIdNotFound.boxed());
    }
    let guild_id = message.guild_id.unwrap();
    let channels: Vec<serenity::ChannelId> =
        database.get_logging_channels(guild_id).await?;
    for channel in &channels {
        // Ignore result, it'll be in the bot's logger
        let embed = create_embed_for_mention(message, guild_id.get());
        let builder = CreateMessage::new().embed(embed);
        let _ = channel
            .send_message(&ctx, builder)
            .await
            .map_err(|e| error!("Failed to send message: {e:?}"));
    }
    Ok(())
}


@@ -1,31 +0,0 @@
use super::{utils::BotData, Error, Result};
use poise::serenity_prelude::{self as serenity, FullEvent};
use tracing::info;
mod everyone;
use everyone::handle_everyone_mention;
/// Function handling events the bot can see.
///
/// # Errors
///
/// This function will return an error if one of the functions error
/// themselves.
pub async fn event_handler(
    ctx: &serenity::Context,
    event: &FullEvent,
    _framework: poise::FrameworkContext<'_, BotData, Error>,
    data: &BotData,
) -> Result {
    match event {
        FullEvent::Ready { data_about_bot } => {
            info!("Logged in as {}", data_about_bot.user.name);
        }
        FullEvent::Message { new_message } => {
            handle_everyone_mention(ctx, &data.database, new_message).await?;
        }
        _ => {}
    }
    Ok(())
}


@@ -1,9 +1,8 @@
 mod commands;
-pub mod error;
 mod events;
 pub mod utils;
-use poise::serenity_prelude::ClientBuilder;
+use poise::FrameworkBuilder;
 use utils::serenity;
 use commands::logging;
@@ -13,15 +12,8 @@ use self::events::event_handler;
 pub type Result = ::std::result::Result<(), Error>;
-/// Bootstraps the Discord bot.
-///
-/// # Panics
-///
-/// Panics if the environment `DISCORD_TOKEN` is unavailable.
-pub fn make_bot() -> ClientBuilder {
-    let intents = serenity::GatewayIntents::non_privileged();
-    let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
-    let framework = poise::Framework::builder()
+pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
+    poise::Framework::builder()
         .options(poise::FrameworkOptions {
             commands: vec![logging()],
             event_handler: |ctx, event, framework, data| {
@@ -29,6 +21,8 @@ pub fn make_bot() -> ClientBuilder {
             },
             ..Default::default()
         })
+        .token(std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN"))
+        .intents(serenity::GatewayIntents::non_privileged())
         .setup(|ctx, _ready, framework| {
             Box::pin(async move {
                 poise::builtins::register_globally(
@@ -39,6 +33,4 @@ pub fn make_bot() -> ClientBuilder {
                 Ok(BotData::new().await?)
             })
         })
-        .build();
-    ClientBuilder::new(token, intents).framework(framework)
 }


@@ -6,14 +6,6 @@ pub struct BotData {
 }
 impl BotData {
-    /// Initialize state data for bot.
-    ///
-    /// For now, this only includes a connector to its database.
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if the database fails to
-    /// initialize.
     pub async fn new() -> color_eyre::Result<Self> {
         Ok(Self {
             database: Database::new().await?,


@@ -1,18 +1,19 @@
 #![warn(clippy::style, clippy::pedantic)]
+mod utils;
 mod db;
 mod discord;
-mod utils;
 use std::error::Error;
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
-    utils::setup_logging();
+    dotenvy::dotenv()?;
     color_eyre::install()?;
+    utils::setup_logging();
-    let mut bot = discord::make_bot().await?;
-    bot.start().await?;
+    let bot = discord::make_bot();
+    bot.run().await?;
     Ok(())
 }


@@ -1,11 +1,6 @@
 use tracing::Level;
 use tracing_subscriber::FmtSubscriber;
-/// Initialize logging for the project.
-///
-/// # Panics
-///
-/// Panics if the logger fails to initialize.
 pub fn setup_logging() {
     let subscriber = FmtSubscriber::builder()
         .with_max_level(Level::INFO)