Compare commits

...

9 Commits

8af4c43b65  Merge pull request 'docs: add Changelog' (#22) from feature/changelog into develop
            Reviewed-on: #22
            2024-01-18 22:44:45 +00:00

cc7fb4d028  docs: add Changelog
            2024-01-18 23:41:07 +01:00

128b0aa6c6  Merge pull request 'chore: bump to 1.0.1' (#20) from feature/1.0.1 into develop
            Reviewed-on: #20
            2024-01-18 22:23:53 +00:00

ba0938672f  chore: bump to 1.0.1
            2024-01-18 23:22:31 +01:00

e32f8b4439  Merge pull request 'chore: update bot framework' (#19) from fix/cve into develop
            Reviewed-on: #19
            2024-01-18 22:12:51 +00:00

844940da00  chore: update bot framework
            This commit updates the crate poise to its latest version
            2024-01-18 20:47:37 +01:00

d17252b338  Merge pull request 'feat: dockerize p4bl0t' (#17) from feature/dockerized into develop
            Reviewed-on: #17
            2024-01-18 01:51:55 +00:00

4789ffd34d  feat: dockerize p4bl0t
            This commit removes DATABASE_URL variable in favour of a fixed name.
            The project won’t panic anymore if this variable isn’t set. This
            removes the need for dotenvy.

            It also adds the necessary files to dockerize the application.

            Update instructions in README on how to run the project.

            Add possibility to compile the project without a database available.

            Closes #8
            2024-01-18 02:50:40 +01:00

d789ea7e74  Merge pull request 'refactor: simplify code, better organize it, and comment it' (#16) from feature/refactorization into develop
            Reviewed-on: #16
            2023-11-25 22:33:50 +00:00
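The 'feat: dockerize p4bl0t' commit above replaces the DATABASE_URL lookup with a fixed SQLite path that is created and migrated when the bot starts. A minimal sketch of that startup pattern, assuming sqlx 0.7 with the `sqlite` feature and an embedded `migrations/` directory (the function name here is illustrative, not the project's exact code):

```rust
use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};

// Create the SQLite database on first start, then run the embedded migrations.
// `sqlx::migrate!()` bakes the `migrations/` directory into the binary at
// compile time, so no DATABASE_URL environment variable is needed at runtime.
async fn init_pool() -> Result<SqlitePool, sqlx::Error> {
    let url = "sqlite:p4bl0t.db";
    if !Sqlite::database_exists(url).await? {
        Sqlite::create_database(url).await?;
    }
    let pool = SqlitePool::connect(url).await?;
    sqlx::migrate!().run(&pool).await?;
    Ok(pool)
}
```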
19 changed files with 902 additions and 453 deletions

.dockerignore (new file, +32 lines)

@@ -0,0 +1,32 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/go/build-context-dockerignore/
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/secrets.dev.yaml
**/values.dev.yaml
/bin
/target
LICENSE
README.md

@@ -1,2 +1 @@
 DISCORD_TOKEN=changeme
-DATABASE_URL=sqlite:p4bl0t.db

.gitignore (vendored, 1 line removed)

@@ -1,4 +1,3 @@
 /target
 /.env
 *.db
-/.sqlx/

@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\nINSERT INTO guild_log_channels (guild_id, channel_id)\nVALUES ( ?1, ?2 )",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 2
    },
    "nullable": []
  },
  "hash": "5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03"
}

@@ -0,0 +1,20 @@
{
  "db_name": "SQLite",
  "query": "\nSELECT channel_id\nFROM guild_log_channels\nWHERE guild_id = ?1",
  "describe": {
    "columns": [
      {
        "name": "channel_id",
        "ordinal": 0,
        "type_info": "Int64"
      }
    ],
    "parameters": {
      "Right": 1
    },
    "nullable": [
      false
    ]
  },
  "hash": "8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab"
}

@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\nDELETE FROM guild_log_channels\nWHERE guild_id = ?1 AND channel_id = ?2",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 2
    },
    "nullable": []
  },
  "hash": "d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e"
}
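The three JSON documents above are sqlx's offline query metadata, the `.sqlx/` directory that the updated `.gitignore` stops ignoring. They let the compile-time-checked `query!` macros build without a live database, which is what allows the Docker build to work. A minimal sketch of the kind of call they back, assuming sqlx 0.7 and a build with `SQLX_OFFLINE=true`; the cache is keyed by the exact query text, so this is illustrative rather than the project's byte-for-byte query:

```rust
// With SQLX_OFFLINE=true, sqlx::query! resolves column names and types from the
// cached metadata in .sqlx/ instead of connecting to DATABASE_URL at build time.
async fn logging_channels(
    pool: &sqlx::SqlitePool,
    guild_id: i64,
) -> Result<Vec<i64>, sqlx::Error> {
    let rows = sqlx::query!(
        "SELECT channel_id FROM guild_log_channels WHERE guild_id = ?1",
        guild_id
    )
    .fetch_all(pool)
    .await?;
    Ok(rows.iter().map(|r| r.channel_id).collect())
}
```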

CHANGELOG.md (new file, +34 lines)

@@ -0,0 +1,34 @@
# Changelog
All notable changes to this project will be documented in this file.
## [unreleased]
### Documentation
- Add Changelog
## [1.0.1] - 2024-01-18
### Features
- Dockerize p4bl0t
### Miscellaneous Tasks
- Update bot framework
### Refactor
- Simplify code, better organize it, and comment it
## [1.0.0] - 2023-11-23
### Features
- Add a channel as a logging channel
- Add listing logger channels in a guild
- Unset a channel as a logger
- Send in logger channels mentions to everyone
<!-- generated by git-cliff -->

Cargo.lock (generated, 921 lines changed)

File diff suppressed because it is too large.

@@ -1,6 +1,6 @@
 [package]
 name = "p4bl0t"
-version = "1.0.0"
+version = "1.0.1"
 edition = "2021"
 authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
 license-file = "LICENSE.md"
@@ -10,12 +10,12 @@ homepage = "https://github.com/phundrak/p4bl0t"
 repository = "https://github.com/phundrak/p4bl0t"
 keywords = ["discord", "bot", "logging"]
 publish = false
+build = "build.rs"

 [dependencies]
 color-eyre = "0.6.2"
-dotenvy = "0.15.7"
-poise = { version = "0.5.7" }
-sqlx = { version = "0.7.2", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
+poise = { version = "0.6.1" }
+sqlx = { version = "0.7.3", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
 tokio = { version = "1.34.0", features = ["macros", "rt-multi-thread"] }
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"

Dockerfile (new file, +93 lines)

@@ -0,0 +1,93 @@
# syntax=docker/dockerfile:1
# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/
ARG RUST_VERSION=1.73.0
ARG APP_NAME=p4bl0t
################################################################################
# xx is a helper for cross-compilation.
# See https://github.com/tonistiigi/xx/ for more information.
FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.3.0 AS xx
################################################################################
# Create a stage for building the application.
FROM --platform=$BUILDPLATFORM rust:${RUST_VERSION}-alpine AS build
ARG APP_NAME
WORKDIR /app
# Copy cross compilation utilities from the xx stage.
COPY --from=xx / /
# Install host build dependencies.
RUN apk add --no-cache clang lld musl-dev git file
# This is the architecture you're building for, which is passed in by the builder.
# Placing it here allows the previous steps to be cached across architectures.
ARG TARGETPLATFORM
# Install cross compilation build dependencies.
RUN xx-apk add --no-cache musl-dev gcc
# Build the application.
# Leverage a cache mount to /usr/local/cargo/registry/
# for downloaded dependencies, a cache mount to /usr/local/cargo/git/db
# for git repository dependencies, and a cache mount to /app/target/ for
# compiled dependencies which will speed up subsequent builds.
# Leverage a bind mount to the src directory to avoid having to copy the
# source code into the container. Once built, copy the executable to an
# output directory before the cache mounted /app/target is unmounted.
RUN --mount=type=bind,source=src,target=src \
--mount=type=bind,source=Cargo.toml,target=Cargo.toml \
--mount=type=bind,source=Cargo.lock,target=Cargo.lock \
--mount=type=bind,source=build.rs,target=build.rs \
--mount=type=bind,source=.sqlx,target=.sqlx \
--mount=type=bind,source=migrations,target=migrations \
--mount=type=cache,target=/app/target/,id=rust-cache-${APP_NAME}-${TARGETPLATFORM} \
--mount=type=cache,target=/usr/local/cargo/git/db \
--mount=type=cache,target=/usr/local/cargo/registry/ \
<<EOF
set -e
# xx-cargo build --locked --release --target-dir ./target
xx-cargo build --locked --target-dir ./target
cp ./target/$(xx-cargo --print-target-triple)/debug/$APP_NAME /bin/server
xx-verify /bin/server
EOF
################################################################################
# Create a new stage for running the application that contains the minimal
# runtime dependencies for the application. This often uses a different base
# image from the build stage where the necessary files are copied from the build
# stage.
#
# The example below uses the alpine image as the foundation for running the app.
# By specifying the "3.18" tag, it will use version 3.18 of alpine. If
# reproducibility is important, consider using a digest
# (e.g., alpine@sha256:664888ac9cfd28068e062c991ebcff4b4c7307dc8dd4df9e728bedde5c449d91).
FROM alpine:3.18 AS final
# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/sbin/nologin" \
--no-create-home \
--uid "${UID}" \
appuser
WORKDIR /app
RUN chown -R appuser /app
USER appuser
# Copy the executable from the "build" stage.
COPY --from=build /bin/server /bin/
# Expose the port that the application listens on.
# EXPOSE 8080
# What the container should run when it is started.
CMD ["/bin/server"]

@@ -3,11 +3,24 @@
 p4bl0t is a simple logging bot for Discord written in Rust.

 ## Usage

+### Preparation
+
-In order to run p4bl0t, head over to your [developer
+In order to run p4bl0t, you will need a Discord token with which your
+bot will authenticate. Head over to your [developer
 portal](https://discord.com/developers) on Discord's website, and
-create a bot there. Then, copy the `.env.example` file to a `.env`
-file and fill in the details.
+create a bot there. You will be able to get the bot's token there.
+
+### Docker
+
+The easiest way to run p4bl0t is using Docker. Copy
+`docker-compose.example.yml` to `docker-compose.yml` and modify the
+`DISCORD_TOKEN` variable.
+
+Then, you can simply run
+```sh
+docker compose up # or docker-compose on some machines
+```
+
+### Building and running it yourself
+
+Copy the `.env.example` file to a `.env` file and fill in the details.
 ```sh
 cp .env.example .env
 emacs .env
@@ -27,7 +40,6 @@ cargo install sqlx-cli

 Setup your SQLite database.
 ```sh
-export DATABASE_URL=<your-database-url> # should be the same as in the .env file
 sqlx database create
 sqlx migrate run
 ```

build.rs (new file, +5 lines)

@@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}

cliff.toml (new file, +84 lines)

@@ -0,0 +1,84 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# postprocessors
postprocessors = [
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
# { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features" },
{ message = "^fix", group = "Bug Fixes" },
{ message = "^doc", group = "Documentation" },
{ message = "^perf", group = "Performance" },
{ message = "^refactor", group = "Refactor" },
{ message = "^style", group = "Styling" },
{ message = "^test", group = "Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore: bump (version )?to", skip = true },
{ message = "^chore\\(deps\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore|ci", group = "Miscellaneous Tasks" },
{ body = ".*security", group = "Security" },
{ message = "^revert", group = "Revert" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# regex for matching git tags
tag_pattern = "[0-9].*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42

@@ -0,0 +1,9 @@
services:
  p4bl0t:
    build:
      context: .
      target: final
    environment:
      DISCORD_TOKEN: changeme
    volumes:
      - ./p4bl0t.db:/app/p4bl0t.db

@@ -1,10 +1,8 @@
 #![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]

-use std::env;
-
 use poise::serenity_prelude::{ChannelId, GuildId};
-use sqlx::SqlitePool;
-use tracing::error;
+use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
+use tracing::{error, info, debug};

 pub type Result<T> = ::std::result::Result<T, sqlx::Error>;
@@ -16,22 +14,23 @@ impl Database {
     /// The Sqlite database should already exist and have its
     /// migrations already executed.
     ///
-    /// # Panics
-    ///
-    /// Panics if the environment variable `DATABASE_URL` is not set.
-    ///
     /// # Errors
     ///
     /// This function will return an error if the Sqlite pool fails to
     /// create.
     pub async fn new() -> Result<Self> {
-        Ok(Self(
-            SqlitePool::connect(
-                &env::var("DATABASE_URL")
-                    .expect("Missing enviroment variable DATABASE_URL"),
-            )
-            .await?,
-        ))
+        let url = "sqlite:p4bl0t.db";
+        if !Sqlite::database_exists(url).await? {
+            info!("Creating database");
+            Sqlite::create_database(url).await?;
+            info!("Database created");
+        }
+        debug!("Getting pool connection");
+        let pool = SqlitePool::connect(url).await?;
+        info!("Running migrations");
+        sqlx::migrate!().run(&pool).await?;
+        debug!("Database initialized");
+        Ok(Self(pool))
     }

     /// Return from database all channels registered as loggers for a
@@ -44,7 +43,7 @@
         &self,
         guild_id: GuildId,
     ) -> Result<Vec<ChannelId>> {
-        let guild_id = guild_id.0 as i64;
+        let guild_id = guild_id.get() as i64;
         sqlx::query!(
             r#"
SELECT channel_id
@@ -63,7 +62,7 @@ WHERE guild_id = ?1"#,
         .map(|channels| {
             channels
                 .iter()
-                .map(|id| ChannelId(id.channel_id as u64))
+                .map(|id| ChannelId::new(id.channel_id as u64))
                 .collect()
         })
     }
@@ -81,8 +80,8 @@ WHERE guild_id = ?1"#,
         guild_id: GuildId,
         channel_id: ChannelId,
     ) -> Result<()> {
-        let guild_id = guild_id.0 as i64;
-        let channel_id = channel_id.0 as i64;
+        let guild_id = guild_id.get() as i64;
+        let channel_id = channel_id.get() as i64;

         let mut conn = self.0.acquire().await?;
         sqlx::query!(r#"
@@ -113,8 +112,8 @@ VALUES ( ?1, ?2 )"#,
         guild: GuildId,
         channel: ChannelId,
     ) -> Result<()> {
-        let guild_id = guild.0 as i64;
-        let channel_id = channel.0 as i64;
+        let guild_id = guild.get() as i64;
+        let channel_id = channel.get() as i64;
         let mut conn = self.0.acquire().await?;
         sqlx::query!(r#"
DELETE FROM guild_log_channels
@@ -4,19 +4,18 @@ use super::super::Result;
 use super::super::error::Error as DiscordError;

-use poise::serenity_prelude::{self as serenity, CreateEmbed};
+use poise::serenity_prelude::{self as serenity, CreateEmbed, CreateMessage};
 use tracing::{error, info};

-fn message_for_everyone_mention(
-    embed: &mut CreateEmbed,
+fn create_embed_for_mention(
     message: &serenity::Message,
     guild_id: u64,
-) {
-    let author = message.author.clone();
-    let message_channel = message.channel_id.0;
-    embed
+) -> CreateEmbed {
+    let author_id = message.author.id.to_string();
+    let message_channel = message.channel_id.get();
+    CreateEmbed::new()
         .title("Someone mentioned everyone!")
-        .field("Author", author.clone(), true)
+        .field("Author", format!("<@{author_id}>"), true)
         .field("When", message.timestamp.naive_local().to_string(), true)
         .field("Channel", format!("<#{message_channel}>"), true)
         .field(
@@ -26,7 +25,7 @@ fn message_for_everyone_mention(
                 message.id
             ),
             false,
-        );
+        )
 }

 /// Handle messages mentioning everyone.
@@ -54,13 +53,10 @@
         database.get_logging_channels(guild_id).await?;
     for channel in &channels {
         // Ignore result, it'll be in the bot's logger
+        let embed = create_embed_for_mention(message, guild_id.get());
+        let builder = CreateMessage::new().embed(embed);
         let _ = channel
-            .send_message(&ctx, |m| {
-                m.embed(|e| {
-                    message_for_everyone_mention(e, message, guild_id.0);
-                    e
-                })
-            })
+            .send_message(&ctx, builder)
             .await
             .map_err(|e| error!("Failed to send message: {e:?}"));
     }

@@ -1,9 +1,6 @@
 use super::{utils::BotData, Error, Result};
-use poise::{
-    serenity_prelude::{self as serenity},
-    Event,
-};
+use poise::serenity_prelude::{self as serenity, FullEvent};
 use tracing::info;

 mod everyone;
@@ -17,15 +14,15 @@ use everyone::handle_everyone_mention;
 /// themselves.
 pub async fn event_handler(
     ctx: &serenity::Context,
-    event: &Event<'_>,
+    event: &FullEvent,
     _framework: poise::FrameworkContext<'_, BotData, Error>,
     data: &BotData,
 ) -> Result {
     match event {
-        Event::Ready { data_about_bot } => {
+        FullEvent::Ready { data_about_bot } => {
             info!("Logged in as {}", data_about_bot.user.name);
         }
-        Event::Message { new_message } => {
+        FullEvent::Message { new_message } => {
             handle_everyone_mention(ctx, &data.database, new_message).await?;
         }
         _ => {}

@@ -1,9 +1,9 @@
 mod commands;
+pub mod error;
 mod events;
 pub mod utils;
-pub mod error;

-use poise::FrameworkBuilder;
+use poise::serenity_prelude::ClientBuilder;
 use utils::serenity;

 use commands::logging;
@@ -18,8 +18,10 @@ pub type Result = ::std::result::Result<(), Error>;
 /// # Panics
 ///
 /// Panics if the environment `DISCORD_TOKEN` is unavailable.
-pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
-    poise::Framework::builder()
+pub fn make_bot() -> ClientBuilder {
+    let intents = serenity::GatewayIntents::non_privileged();
+    let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
+    let framework = poise::Framework::builder()
         .options(poise::FrameworkOptions {
             commands: vec![logging()],
             event_handler: |ctx, event, framework, data| {
@@ -27,8 +29,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
             },
             ..Default::default()
         })
-        .token(std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN"))
-        .intents(serenity::GatewayIntents::non_privileged())
         .setup(|ctx, _ready, framework| {
             Box::pin(async move {
                 poise::builtins::register_globally(
@@ -39,4 +39,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
                 Ok(BotData::new().await?)
             })
         })
+        .build();
+    ClientBuilder::new(token, intents).framework(framework)
 }

@@ -1,19 +1,18 @@
 #![warn(clippy::style, clippy::pedantic)]

-mod utils;
 mod db;
 mod discord;
+mod utils;

 use std::error::Error;

 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
-    dotenvy::dotenv()?;
-    color_eyre::install()?;
     utils::setup_logging();
+    color_eyre::install()?;

-    let bot = discord::make_bot();
-    bot.run().await?;
+    let mut bot = discord::make_bot().await?;
+    bot.start().await?;
     Ok(())
 }