Compare commits

...

11 Commits

8af4c43b65  Merge pull request 'docs: add Changelog' (#22) from feature/changelog into develop
            Reviewed-on: #22
            2024-01-18 22:44:45 +00:00

cc7fb4d028  docs: add Changelog
            2024-01-18 23:41:07 +01:00

128b0aa6c6  Merge pull request 'chore: bump to 1.0.1' (#20) from feature/1.0.1 into develop
            Reviewed-on: #20
            2024-01-18 22:23:53 +00:00

ba0938672f  chore: bump to 1.0.1
            2024-01-18 23:22:31 +01:00

e32f8b4439  Merge pull request 'chore: update bot framework' (#19) from fix/cve into develop
            Reviewed-on: #19
            2024-01-18 22:12:51 +00:00

844940da00  chore: update bot framework
            This commit updates the crate poise to its latest version
            2024-01-18 20:47:37 +01:00

d17252b338  Merge pull request 'feat: dockerize p4bl0t' (#17) from feature/dockerized into develop
            Reviewed-on: #17
            2024-01-18 01:51:55 +00:00

4789ffd34d  feat: dockerize p4bl0t
            This commit removes DATABASE_URL variable in favour of a fixed name.
            The project won’t panic anymore if this variable isn’t set. This
            removes the need for dotenvy.

            It also adds the necessary files to dockerize the application.

            Update instructions in README on how to run the project.

            Add possibility to compile the project without a database available.

            Closes #8
            2024-01-18 02:50:40 +01:00

d789ea7e74  Merge pull request 'refactor: simplify code, better organize it, and comment it' (#16) from feature/refactorization into develop
            Reviewed-on: #16
            2023-11-25 22:33:50 +00:00

d6b208963d  refactor: simplify code, better organize it, and comment it
            2023-11-25 23:33:06 +01:00

75cd5dd7cb  Merge pull request 'chore: bump version to 1.0.0' (#14) from feature/release-1.0 into develop
            Reviewed-on: #14
            2023-11-23 23:04:53 +00:00
25 changed files with 1154 additions and 532 deletions

.dockerignore (new file, 32 lines)

@ -0,0 +1,32 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/go/build-context-dockerignore/
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/secrets.dev.yaml
**/values.dev.yaml
/bin
/target
LICENSE
README.md

.env.example

@ -1,2 +1 @@
 DISCORD_TOKEN=changeme
-DATABASE_URL=sqlite:p4bl0t.db

.gitignore (vendored, 1 line changed)

@ -1,4 +1,3 @@
 /target
 /.env
 *.db
-/.sqlx/

.sqlx/query-*.json (new file, 12 lines)

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\nINSERT INTO guild_log_channels (guild_id, channel_id)\nVALUES ( ?1, ?2 )",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03"
}

.sqlx/query-*.json (new file, 20 lines)

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "\nSELECT channel_id\nFROM guild_log_channels\nWHERE guild_id = ?1",
"describe": {
"columns": [
{
"name": "channel_id",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab"
}

.sqlx/query-*.json (new file, 12 lines)

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\nDELETE FROM guild_log_channels\nWHERE guild_id = ?1 AND channel_id = ?2",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e"
}
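
These three JSON files appear to be the query metadata that `cargo sqlx prepare` writes under `.sqlx/`, which is what lets the `sqlx::query!` macros type-check offline and matches the commit note about compiling the project without a database available. A minimal sketch of the kind of call this metadata backs, mirroring the cached `SELECT` above (the function name and signature are illustrative only):

```rust
// Illustrative sketch, not project code: a compile-time-checked sqlx query.
// With SQLX_OFFLINE=true, the macro validates against .sqlx/query-<hash>.json
// instead of a live DATABASE_URL.
use sqlx::SqlitePool;

async fn logging_channels(pool: &SqlitePool, guild_id: i64) -> sqlx::Result<Vec<i64>> {
    let rows = sqlx::query!(
        r#"
SELECT channel_id
FROM guild_log_channels
WHERE guild_id = ?1"#,
        guild_id
    )
    .fetch_all(pool)
    .await?;
    Ok(rows.iter().map(|row| row.channel_id).collect())
}
```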

CHANGELOG.md (new file, 34 lines)

@ -0,0 +1,34 @@
# Changelog
All notable changes to this project will be documented in this file.
## [unreleased]
### Documentation
- Add Changelog
## [1.0.1] - 2024-01-18
### Features
- Dockerize p4bl0t
### Miscellaneous Tasks
- Update bot framework
### Refactor
- Simplify code, better organize it, and comment it
## [1.0.0] - 2023-11-23
### Features
- Add a channel as a logging channel
- Add listing logger channels in a guild
- Unset a channel as a logger
- Send in logger channels mentions to everyone
<!-- generated by git-cliff -->

Cargo.lock (generated, 972 lines changed; diff suppressed because it is too large)

Cargo.toml

@ -1,6 +1,6 @@
 [package]
 name = "p4bl0t"
-version = "1.0.0"
+version = "1.0.1"
 edition = "2021"
 authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
 license-file = "LICENSE.md"
@ -10,12 +10,12 @ homepage = "https://github.com/phundrak/p4bl0t"
 repository = "https://github.com/phundrak/p4bl0t"
 keywords = ["discord", "bot", "logging"]
 publish = false
+build = "build.rs"
 [dependencies]
 color-eyre = "0.6.2"
-dotenvy = "0.15.7"
-poise = { version = "0.5.7" }
-sqlx = { version = "0.7.2", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
+poise = { version = "0.6.1" }
+sqlx = { version = "0.7.3", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
 tokio = { version = "1.34.0", features = ["macros", "rt-multi-thread"] }
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"

Dockerfile (new file, 93 lines)

@ -0,0 +1,93 @@
# syntax=docker/dockerfile:1
# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/
ARG RUST_VERSION=1.73.0
ARG APP_NAME=p4bl0t
################################################################################
# xx is a helper for cross-compilation.
# See https://github.com/tonistiigi/xx/ for more information.
FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.3.0 AS xx
################################################################################
# Create a stage for building the application.
FROM --platform=$BUILDPLATFORM rust:${RUST_VERSION}-alpine AS build
ARG APP_NAME
WORKDIR /app
# Copy cross compilation utilities from the xx stage.
COPY --from=xx / /
# Install host build dependencies.
RUN apk add --no-cache clang lld musl-dev git file
# This is the architecture you're building for, which is passed in by the builder.
# Placing it here allows the previous steps to be cached across architectures.
ARG TARGETPLATFORM
# Install cross compilation build dependencies.
RUN xx-apk add --no-cache musl-dev gcc
# Build the application.
# Leverage a cache mount to /usr/local/cargo/registry/
# for downloaded dependencies, a cache mount to /usr/local/cargo/git/db
# for git repository dependencies, and a cache mount to /app/target/ for
# compiled dependencies which will speed up subsequent builds.
# Leverage a bind mount to the src directory to avoid having to copy the
# source code into the container. Once built, copy the executable to an
# output directory before the cache mounted /app/target is unmounted.
RUN --mount=type=bind,source=src,target=src \
--mount=type=bind,source=Cargo.toml,target=Cargo.toml \
--mount=type=bind,source=Cargo.lock,target=Cargo.lock \
--mount=type=bind,source=build.rs,target=build.rs \
--mount=type=bind,source=.sqlx,target=.sqlx \
--mount=type=bind,source=migrations,target=migrations \
--mount=type=cache,target=/app/target/,id=rust-cache-${APP_NAME}-${TARGETPLATFORM} \
--mount=type=cache,target=/usr/local/cargo/git/db \
--mount=type=cache,target=/usr/local/cargo/registry/ \
<<EOF
set -e
# xx-cargo build --locked --release --target-dir ./target
xx-cargo build --locked --target-dir ./target
cp ./target/$(xx-cargo --print-target-triple)/debug/$APP_NAME /bin/server
xx-verify /bin/server
EOF
################################################################################
# Create a new stage for running the application that contains the minimal
# runtime dependencies for the application. This often uses a different base
# image from the build stage where the necessary files are copied from the build
# stage.
#
# The example below uses the alpine image as the foundation for running the app.
# By specifying the "3.18" tag, it will use version 3.18 of alpine. If
# reproducibility is important, consider using a digest
# (e.g., alpine@sha256:664888ac9cfd28068e062c991ebcff4b4c7307dc8dd4df9e728bedde5c449d91).
FROM alpine:3.18 AS final
# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/sbin/nologin" \
--no-create-home \
--uid "${UID}" \
appuser
WORKDIR /app
RUN chown -R appuser /app
USER appuser
# Copy the executable from the "build" stage.
COPY --from=build /bin/server /bin/
# Expose the port that the application listens on.
# EXPOSE 8080
# What the container should run when it is started.
CMD ["/bin/server"]

README.md

@ -3,11 +3,24 @@
p4bl0t is a simple logging bot for Discord written in Rust.
## Usage
In order to run p4bl0t, head over to your [developer
### Preparation
In order to run p4bl0t, you will need a Discord token with which your
bot will authenticate. Head over to your [developer
portal](https://discord.com/developers) on Discord's website, and
create a bot there. Then, copy the `.env.example` file to a `.env`
file and fill in the details.
create a bot there. You will be able to get the bot's token there.
### Docker
The easiest way to run p4bl0t is using Docker. Copy
`docker-compose.example.yml` to `docker-compose.yml` and modify the
`DISCORD_TOKEN` variable.
Then, you can simply run
```sh
docker compose up # or docker-compose on some machines
```
### Building and running it yourself
Copy the `.env.example` file to a `.env` file and fill in the details.
```sh
cp .env.example .env
emacs .env
@ -27,7 +40,6 @@ cargo install sqlx-cli
Setup your SQLite database.
```sh
export DATABASE_URL=<your-database-url> # should be the same as in the .env file
sqlx database create
sqlx migrate run
```

build.rs (new file, 5 lines)

@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
// trigger recompilation when a new migration is added
println!("cargo:rerun-if-changed=migrations");
}
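
This five-line build script pairs with the embedded migrator used in `src/db.rs` further down: `sqlx::migrate!()` bakes the `migrations/` directory into the binary at compile time, and the `rerun-if-changed` line makes Cargo rebuild whenever a migration file is added. A minimal sketch of the consuming side, assuming the default `migrations/` directory:

```rust
// Sketch only: the side of the pairing that build.rs exists to support.
use sqlx::SqlitePool;

async fn run_migrations(pool: &SqlitePool) -> Result<(), sqlx::migrate::MigrateError> {
    // Embeds ./migrations at compile time; build.rs ensures a new migration
    // triggers a recompile so the embedded set stays current.
    sqlx::migrate!().run(pool).await
}
```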

cliff.toml (new file, 84 lines)

@ -0,0 +1,84 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# postprocessors
postprocessors = [
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
# { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features" },
{ message = "^fix", group = "Bug Fixes" },
{ message = "^doc", group = "Documentation" },
{ message = "^perf", group = "Performance" },
{ message = "^refactor", group = "Refactor" },
{ message = "^style", group = "Styling" },
{ message = "^test", group = "Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore: bump (version )?to", skip = true },
{ message = "^chore\\(deps\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore|ci", group = "Miscellaneous Tasks" },
{ body = ".*security", group = "Security" },
{ message = "^revert", group = "Revert" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# regex for matching git tags
tag_pattern = "[0-9].*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42

docker-compose.example.yml (new file, 9 lines)

@ -0,0 +1,9 @@
services:
p4bl0t:
build:
context: .
target: final
environment:
DISCORD_TOKEN: changeme
volumes:
- ./p4bl0t.db:/app/p4bl0t.db

src/db.rs

@ -1,66 +1,92 @@
#![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]
use std::env;
use poise::serenity_prelude::{ChannelId, GuildId};
use sqlx::SqlitePool;
use tracing::error;
use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
use tracing::{error, info, debug};
pub type Result<T> = ::std::result::Result<T, sqlx::Error>;
pub struct Database {
pool: SqlitePool,
}
pub struct Database(SqlitePool);
impl Database {
/// Initialize Sqlite database.
///
/// The Sqlite database should already exist and have its
/// migrations already executed.
///
/// # Errors
///
/// This function will return an error if the Sqlite pool fails to
/// create.
pub async fn new() -> Result<Self> {
Ok(Self {
pool: SqlitePool::connect(
&env::var("DATABASE_URL")
.expect("Missing enviroment variable DATABASE_URL"),
)
.await?,
})
let url = "sqlite:p4bl0t.db";
if !Sqlite::database_exists(url).await? {
info!("Creating database");
Sqlite::create_database(url).await?;
info!("Database created");
}
debug!("Getting pool connection");
let pool = SqlitePool::connect(url).await?;
info!("Running migrations");
sqlx::migrate!().run(&pool).await?;
debug!("Database initialized");
Ok(Self(pool))
}
/// Return from database all channels registered as loggers for a
/// guild.
///
/// # Errors
///
/// This function will return an error if `sqlx` does so.
pub async fn get_logging_channels(
&self,
guild_id: GuildId,
) -> Result<Vec<u64>> {
let guild_id = guild_id.0 as i64;
let channels = sqlx::query!(
) -> Result<Vec<ChannelId>> {
let guild_id = guild_id.get() as i64;
sqlx::query!(
r#"
SELECT channel_id
FROM guild_log_channels
WHERE guild_id = ?1
"#,
WHERE guild_id = ?1"#,
guild_id
)
.fetch_all(&self.pool)
.fetch_all(&self.0)
.await
.map_err(|e| {
error!(
"Error getting logging channels for guild {guild_id}: {e:?}"
);
e
})?;
Ok(channels.iter().map(|id| id.channel_id as u64).collect())
})
.map(|channels| {
channels
.iter()
.map(|id| ChannelId::new(id.channel_id as u64))
.collect()
})
}
/// Adds a channel as a logger for a guild.
///
/// # Errors
///
/// This function will return an error if `sqlx` does so. This may
/// be either a database issue, or a channel is already registered
/// as a guild's logger, therefore violating the unicity
/// constraint for guild ID and channel ID pairs.
pub async fn set_logging_channel(
&self,
guild_id: GuildId,
channel_id: ChannelId,
) -> Result<()> {
let guild_id = guild_id.0 as i64;
let channel_id = channel_id.0 as i64;
let mut conn = self.pool.acquire().await?;
let guild_id = guild_id.get() as i64;
let channel_id = channel_id.get() as i64;
let mut conn = self.0.acquire().await?;
sqlx::query!(
r#"
sqlx::query!(r#"
INSERT INTO guild_log_channels (guild_id, channel_id)
VALUES ( ?1, ?2 )
"#,
VALUES ( ?1, ?2 )"#,
guild_id,
channel_id
)
@ -73,18 +99,25 @@ VALUES ( ?1, ?2 )
.map(|_| ())
}
/// Unregister a channel as a logger for a guild.
///
/// This function will return a success value even if `channel`
/// was not a logger of `guild` already.
///
/// # Errors
///
/// This function will return an error if `sqlx` does so.
pub async fn remove_logging_channel(
&self,
guild_id: GuildId,
channel_id: ChannelId,
guild: GuildId,
channel: ChannelId,
) -> Result<()> {
let guild_id = guild_id.0 as i64;
let channel_id = channel_id.0 as i64;
let mut conn = self.pool.acquire().await?;
let guild_id = guild.get() as i64;
let channel_id = channel.get() as i64;
let mut conn = self.0.acquire().await?;
sqlx::query!(r#"
DELETE FROM guild_log_channels
WHERE guild_id = ?1 AND channel_id = ?2
"#,
WHERE guild_id = ?1 AND channel_id = ?2"#,
guild_id,
channel_id)
.execute(&mut *conn)
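
The hunk above is cut off, but the reshaped API is visible: `Database` is now a newtype over `SqlitePool`, `new()` creates the SQLite file and runs the migrations itself, and `get_logging_channels` returns `ChannelId` values directly. A hedged usage sketch of that API as it appears in this diff (placeholder IDs, minimal error handling):

```rust
// Sketch of how the Database type above might be driven; not project code.
use poise::serenity_prelude::{ChannelId, GuildId};
use crate::db::Database; // the type defined in this diff

async fn example() -> Result<(), sqlx::Error> {
    let db = Database::new().await?;      // creates p4bl0t.db and runs migrations
    let guild = GuildId::new(1234567890); // placeholder IDs for illustration
    let channel = ChannelId::new(9876543210);

    db.set_logging_channel(guild, channel).await?;
    let loggers = db.get_logging_channels(guild).await?;
    assert!(loggers.contains(&channel));
    db.remove_logging_channel(guild, channel).await?;
    Ok(())
}
```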

src/discord/commands/logging.rs

@ -1,7 +1,16 @@
use super::{Context, Result};
use super::super::{Context, Result};
use super::utils::serenity;
use poise::serenity_prelude as serenity;
/// Main command for logging subcommands.
///
/// This command cannot be called on its own and will do nothing by
/// itself.
///
/// # Errors
///
/// This command will never error out, even if its signature says it
/// can.
#[allow(clippy::unused_async)]
#[poise::command(
slash_command,
@ -12,8 +21,13 @@ pub async fn logging(_ctx: Context<'_>) -> Result {
Ok(())
}
/// Add a channel as a logger.
///
/// # Errors
///
/// This function will return an error if the database returns one.
#[poise::command(slash_command)]
pub async fn add_channel(
async fn add_channel(
ctx: Context<'_>,
#[description = "New logging channel"] channel: serenity::Channel,
) -> Result {
@ -50,8 +64,16 @@ pub async fn add_channel(
Ok(())
}
/// List all channels registered as loggers for a guild.
///
/// This will list all channels that are logger channels in the server
/// from which the command was executed.
///
/// # Errors
///
/// This function will return an error if the database returns one.
#[poise::command(slash_command)]
pub async fn list_channels(ctx: Context<'_>) -> Result {
async fn list_channels(ctx: Context<'_>) -> Result {
let response = match ctx.guild_id() {
None => "Error: Could not determine the guild's ID".to_owned(),
Some(guild_id) => {
@ -78,8 +100,18 @@ pub async fn list_channels(ctx: Context<'_>) -> Result {
Ok(())
}
/// Remove a channel as a logger in a guild.
///
/// This will remove a channel from the list of logger channels in the
/// guild from which the command was executed. If the channel is not a
/// logger, the bot will still consider unsetting the channel as a
/// logger a success.
///
/// # Errors
///
/// This function will return an error if the database errors.
#[poise::command(slash_command)]
pub async fn remove_channel(
async fn remove_channel(
ctx: Context<'_>,
#[description = "Logger channel to remove"] channel: serenity::Channel,
) -> Result {
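
The subcommands in this file lose their `pub` because they are only reachable through the parent `logging` slash command; the attribute that wires them up is cut off by the hunk boundary above. A hedged sketch of what that wiring typically looks like in poise 0.6 (the exact argument list on the real command is an assumption):

```rust
// Hedged sketch; the real #[poise::command(...)] arguments are not visible
// in the diff above.
use super::super::{Context, Result}; // same imports as logging.rs itself

#[allow(clippy::unused_async)]
#[poise::command(
    slash_command,
    subcommands("add_channel", "list_channels", "remove_channel")
)]
pub async fn logging(_ctx: Context<'_>) -> Result {
    // Never runs on its own; Discord only ever invokes the subcommands.
    Ok(())
}
```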

src/discord/commands/mod.rs (new file, 3 lines)

@ -0,0 +1,3 @@
mod logging;
pub(crate) use logging::logging;

src/discord/error.rs (new file, 24 lines)

@ -0,0 +1,24 @@
use std::error::Error as StdError;
use std::fmt::{self, Display};
#[derive(Debug, Clone, Copy)]
pub enum Error {
GuildIdNotFound,
}
impl Error {
pub fn boxed(self) -> Box<Self> {
Box::new(self)
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// write!(f, "")
match self {
Self::GuildIdNotFound => write!(f, "Guild ID not found!"),
}
}
}
impl StdError for Error {}
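
The `boxed()` helper exists because the bot-wide `Error`/`Result` aliases (declared in `src/discord/mod.rs`, not shown in full here) appear to follow poise's usual boxed-error shape, so a concrete `Error::GuildIdNotFound` has to be converted before it can bubble out of a command or event handler. A sketch under that assumption:

```rust
// Sketch only: the aliases below are assumptions standing in for the ones
// defined in src/discord/mod.rs.
type BotError = Box<dyn std::error::Error + Send + Sync>;
type BotResult = Result<(), BotError>;

fn require_guild(guild_id: Option<u64>) -> BotResult {
    match guild_id {
        Some(_) => Ok(()),
        // `Error` is the enum defined above; `boxed()` converts it into the
        // framework-wide boxed error type.
        None => Err(Error::GuildIdNotFound.boxed()),
    }
}
```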

src/discord/events.rs (deleted, 72 lines)

@ -1,72 +0,0 @@
use crate::db::Database;
use super::{utils::BotData, Error, Result};
use poise::{serenity_prelude as serenity, Event};
use tracing::{error, info};
async fn handle_everyone_mention(
ctx: &serenity::Context,
database: &Database,
message: &serenity::Message,
) -> Result {
use serenity::ChannelId;
if let Some(guild_id) = message.guild_id {
if message.mention_everyone {
let author = message.author.clone();
let message_channel = message.channel_id;
let channels: Vec<ChannelId> = database
.get_logging_channels(guild_id)
.await?
.iter()
.map(|channel_id| serenity::ChannelId(channel_id.to_owned()))
.collect();
for channel in &channels {
channel
.send_message(&ctx, |m| {
m.embed(|e| {
e.title("Someone mentioned everyone!")
.field("Author", author.clone(), true)
.field(
"When",
message.timestamp.naive_local().to_string(),
true,
)
.field(
"Channel",
format!("<#{message_channel}>"),
true,
)
.field("Link", format!("https://discord.com/channels/{guild_id}/{}/{}", channel.0, message.id), false)
})
})
.await
.map_err(|e| {
error!("Failed to send message: {e:?}");
e
})?;
}
}
} else {
error!("Could not determine guild id of message {message:?}");
}
Ok(())
}
pub async fn event_handler(
ctx: &serenity::Context,
event: &Event<'_>,
_framework: poise::FrameworkContext<'_, BotData, Error>,
data: &BotData,
) -> Result {
match event {
Event::Ready { data_about_bot } => {
info!("Logged in as {}", data_about_bot.user.name);
}
Event::Message { new_message } => {
handle_everyone_mention(ctx, &data.database, new_message).await?;
}
_ => {}
}
Ok(())
}

src/discord/events/everyone.rs (new file, 64 lines)

@ -0,0 +1,64 @@
use crate::db::Database;
use super::super::Result;
use super::super::error::Error as DiscordError;
use poise::serenity_prelude::{self as serenity, CreateEmbed, CreateMessage};
use tracing::{error, info};
fn create_embed_for_mention(
message: &serenity::Message,
guild_id: u64,
) -> CreateEmbed {
let author_id = message.author.id.to_string();
let message_channel = message.channel_id.get();
CreateEmbed::new()
.title("Someone mentioned everyone!")
.field("Author", format!("<@{author_id}>"), true)
.field("When", message.timestamp.naive_local().to_string(), true)
.field("Channel", format!("<#{message_channel}>"), true)
.field(
"Link",
format!(
"https://discord.com/channels/{guild_id}/{message_channel}/{}",
message.id
),
false,
)
}
/// Handle messages mentioning everyone.
///
/// # Errors
///
/// This function will return an error if a message fails to be sent,
/// if retrieving the list of channels registered as loggers fails, or
/// if there is no guild ID that can be retrieved from the message.
pub async fn handle_everyone_mention(
ctx: &serenity::Context,
database: &Database,
message: &serenity::Message,
) -> Result {
info!("Message mentioning everyone: {message:?}");
if !message.mention_everyone {
return Ok(());
}
if message.guild_id.is_none() {
error!("Message without a guild_id! {message:?}");
return Err(DiscordError::GuildIdNotFound.boxed());
}
let guild_id = message.guild_id.unwrap();
let channels: Vec<serenity::ChannelId> =
database.get_logging_channels(guild_id).await?;
for channel in &channels {
// Ignore result, it'll be in the bot's logger
let embed = create_embed_for_mention(message, guild_id.get());
let builder = CreateMessage::new().embed(embed);
let _ = channel
.send_message(&ctx, builder)
.await
.map_err(|e| error!("Failed to send message: {e:?}"));
}
Ok(())
}

src/discord/events/mod.rs (new file, 31 lines)

@ -0,0 +1,31 @@
use super::{utils::BotData, Error, Result};
use poise::serenity_prelude::{self as serenity, FullEvent};
use tracing::info;
mod everyone;
use everyone::handle_everyone_mention;
/// Function handling events the bot can see.
///
/// # Errors
///
/// This function will return an error if one of the handler
/// functions returns an error itself.
pub async fn event_handler(
ctx: &serenity::Context,
event: &FullEvent,
_framework: poise::FrameworkContext<'_, BotData, Error>,
data: &BotData,
) -> Result {
match event {
FullEvent::Ready { data_about_bot } => {
info!("Logged in as {}", data_about_bot.user.name);
}
FullEvent::Message { new_message } => {
handle_everyone_mention(ctx, &data.database, new_message).await?;
}
_ => {}
}
Ok(())
}

src/discord/mod.rs

@ -1,8 +1,9 @@
mod commands;
pub mod error;
mod events;
pub mod utils;
use poise::FrameworkBuilder;
use poise::serenity_prelude::ClientBuilder;
use utils::serenity;
use commands::logging;
@ -12,8 +13,15 @@ use self::events::event_handler;
pub type Result = ::std::result::Result<(), Error>;
pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
poise::Framework::builder()
/// Bootstraps the Discord bot.
///
/// # Panics
///
/// Panics if the environment variable `DISCORD_TOKEN` is unavailable.
pub fn make_bot() -> ClientBuilder {
let intents = serenity::GatewayIntents::non_privileged();
let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
let framework = poise::Framework::builder()
.options(poise::FrameworkOptions {
commands: vec![logging()],
event_handler: |ctx, event, framework, data| {
@ -21,8 +29,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
},
..Default::default()
})
.token(std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN"))
.intents(serenity::GatewayIntents::non_privileged())
.setup(|ctx, _ready, framework| {
Box::pin(async move {
poise::builtins::register_globally(
@ -33,4 +39,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
Ok(BotData::new().await?)
})
})
.build();
ClientBuilder::new(token, intents).framework(framework)
}
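
Because the hunks above interleave the removed `FrameworkBuilder` code with the added `ClientBuilder` code, the migrated `make_bot()` is hard to read in one piece. The sketch below reassembles it from the added lines; the event-handler closure body and the `register_globally` arguments are not visible in the diff and are filled in from poise 0.6's usual pattern, so treat those parts as assumptions:

```rust
// Assumed reassembly of the new make_bot(); see the caveats above.
use poise::serenity_prelude as serenity; // the real file goes through utils::serenity
use poise::serenity_prelude::ClientBuilder;

pub fn make_bot() -> ClientBuilder {
    let intents = serenity::GatewayIntents::non_privileged();
    let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
    let framework = poise::Framework::builder()
        .options(poise::FrameworkOptions {
            commands: vec![logging()],
            // Closure body assumed: poise expects a boxed future here.
            event_handler: |ctx, event, framework, data| {
                Box::pin(event_handler(ctx, event, framework, data))
            },
            ..Default::default()
        })
        .setup(|ctx, _ready, framework| {
            Box::pin(async move {
                // Argument list assumed from poise's builtins.
                poise::builtins::register_globally(ctx, &framework.options().commands)
                    .await?;
                Ok(BotData::new().await?)
            })
        })
        .build();
    // The framework is now a plugin on serenity's ClientBuilder; main.rs
    // awaits this builder and calls .start() on the resulting client.
    ClientBuilder::new(token, intents).framework(framework)
}
```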

src/discord/utils.rs

@ -6,6 +6,14 @@ pub struct BotData {
}
impl BotData {
/// Initialize state data for bot.
///
/// For now, this only includes a connector to its database.
///
/// # Errors
///
/// This function will return an error if the database fails to
/// initialize.
pub async fn new() -> color_eyre::Result<Self> {
Ok(Self {
database: Database::new().await?,

src/main.rs

@ -1,19 +1,18 @@
#![warn(clippy::style, clippy::pedantic)]
mod utils;
mod db;
mod discord;
mod utils;
use std::error::Error;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
dotenvy::dotenv()?;
color_eyre::install()?;
utils::setup_logging();
color_eyre::install()?;
let bot = discord::make_bot();
bot.run().await?;
let mut bot = discord::make_bot().await?;
bot.start().await?;
Ok(())
}

src/utils.rs

@ -1,6 +1,11 @@
use tracing::Level;
use tracing_subscriber::FmtSubscriber;
/// Initialize logging for the project.
///
/// # Panics
///
/// Panics if the logger fails to initialize.
pub fn setup_logging() {
let subscriber = FmtSubscriber::builder()
.with_max_level(Level::INFO)
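
The hunk is truncated here. A typical completion of such a `FmtSubscriber` setup (an assumption, not the project's exact code) finishes the builder and installs the subscriber globally, which is also the step that can panic as the new doc comment warns:

```rust
// Hedged sketch of a conventional ending for the setup above; the rest of
// utils.rs is not shown in this diff.
use tracing::Level;
use tracing_subscriber::FmtSubscriber;

pub fn setup_logging() {
    let subscriber = FmtSubscriber::builder()
        .with_max_level(Level::INFO)
        .finish();
    tracing::subscriber::set_global_default(subscriber)
        .expect("setting default tracing subscriber failed"); // the panic the doc comment mentions
}
```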