Compare commits

9 Commits:

- 8af4c43b65
- cc7fb4d028
- 128b0aa6c6
- ba0938672f
- e32f8b4439
- 844940da00
- d17252b338
- 4789ffd34d
- d789ea7e74
.dockerignore — new file, 32 lines
@@ -0,0 +1,32 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/go/build-context-dockerignore/

**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/secrets.dev.yaml
**/values.dev.yaml
/bin
/target
LICENSE
README.md
@@ -1,2 +1 @@
DISCORD_TOKEN=changeme
DATABASE_URL=sqlite:p4bl0t.db
.gitignore — vendored, 1 line changed
@@ -1,4 +1,3 @@
/target
/.env
*.db
/.sqlx/
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\nINSERT INTO guild_log_channels (guild_id, channel_id)\nVALUES ( ?1, ?2 )",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 2
    },
    "nullable": []
  },
  "hash": "5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03"
}

@@ -0,0 +1,20 @@
{
  "db_name": "SQLite",
  "query": "\nSELECT channel_id\nFROM guild_log_channels\nWHERE guild_id = ?1",
  "describe": {
    "columns": [
      {
        "name": "channel_id",
        "ordinal": 0,
        "type_info": "Int64"
      }
    ],
    "parameters": {
      "Right": 1
    },
    "nullable": [
      false
    ]
  },
  "hash": "8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab"
}

@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\nDELETE FROM guild_log_channels\nWHERE guild_id = ?1 AND channel_id = ?2",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 2
    },
    "nullable": []
  },
  "hash": "d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e"
}
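The three JSON files above are sqlx's offline query cache (the data `cargo sqlx prepare` writes into `.sqlx/`): each entry records the query text, parameter count, and result-column metadata so the `sqlx::query!` macros can be type-checked without a live database. As a hedged sketch of what those cache entries back, assuming the `guild_log_channels` table from the project's migrations and an already-connected `SqlitePool` (the function name and id values are made up for illustration):

```rust
// Sketch only: illustrates the three cached queries above, assuming a
// guild_log_channels (guild_id INTEGER, channel_id INTEGER) table and an
// open sqlx::SqlitePool named `pool`. Identifiers other than the SQL itself
// are hypothetical.
async fn example(pool: &sqlx::SqlitePool) -> Result<(), sqlx::Error> {
    let (guild_id, channel_id): (i64, i64) = (1, 2);

    // INSERT cache entry: 2 right-hand parameters, no columns returned.
    sqlx::query!(
        r#"
INSERT INTO guild_log_channels (guild_id, channel_id)
VALUES ( ?1, ?2 )"#,
        guild_id,
        channel_id
    )
    .execute(pool)
    .await?;

    // SELECT cache entry: 1 parameter, one non-nullable Int64 column.
    let rows = sqlx::query!(
        r#"
SELECT channel_id
FROM guild_log_channels
WHERE guild_id = ?1"#,
        guild_id
    )
    .fetch_all(pool)
    .await?;
    let _ids: Vec<i64> = rows.iter().map(|r| r.channel_id).collect();

    // DELETE cache entry: 2 parameters, no columns returned.
    sqlx::query!(
        r#"
DELETE FROM guild_log_channels
WHERE guild_id = ?1 AND channel_id = ?2"#,
        guild_id,
        channel_id
    )
    .execute(pool)
    .await?;

    Ok(())
}
```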
CHANGELOG.md — new file, 34 lines
@@ -0,0 +1,34 @@
# Changelog

All notable changes to this project will be documented in this file.

## [unreleased]

### Documentation

- Add Changelog

## [1.0.1] - 2024-01-18

### Features

- Dockerize p4bl0t

### Miscellaneous Tasks

- Update bot framework

### Refactor

- Simplify code, better organize it, and comment it

## [1.0.0] - 2023-11-23

### Features

- Add a channel as a logging channel
- Add listing logger channels in a guild
- Unset a channel as a logger
- Send in logger channels mentions to everyone

<!-- generated by git-cliff -->
Cargo.lock — generated, 921 lines changed (file diff suppressed because it is too large)
Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "p4bl0t"
version = "1.0.0"
version = "1.0.1"
edition = "2021"
authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
license-file = "LICENSE.md"

@@ -10,12 +10,12 @@ homepage = "https://github.com/phundrak/p4bl0t"
repository = "https://github.com/phundrak/p4bl0t"
keywords = ["discord", "bot", "logging"]
publish = false
build = "build.rs"

[dependencies]
color-eyre = "0.6.2"
dotenvy = "0.15.7"
poise = { version = "0.5.7" }
sqlx = { version = "0.7.2", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
poise = { version = "0.6.1" }
sqlx = { version = "0.7.3", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
tokio = { version = "1.34.0", features = ["macros", "rt-multi-thread"] }
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
Dockerfile — new file, 93 lines
@@ -0,0 +1,93 @@
# syntax=docker/dockerfile:1

# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/

ARG RUST_VERSION=1.73.0
ARG APP_NAME=p4bl0t

################################################################################
# xx is a helper for cross-compilation.
# See https://github.com/tonistiigi/xx/ for more information.
FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.3.0 AS xx

################################################################################
# Create a stage for building the application.
FROM --platform=$BUILDPLATFORM rust:${RUST_VERSION}-alpine AS build
ARG APP_NAME
WORKDIR /app

# Copy cross compilation utilities from the xx stage.
COPY --from=xx / /

# Install host build dependencies.
RUN apk add --no-cache clang lld musl-dev git file

# This is the architecture you’re building for, which is passed in by the builder.
# Placing it here allows the previous steps to be cached across architectures.
ARG TARGETPLATFORM

# Install cross compilation build dependencies.
RUN xx-apk add --no-cache musl-dev gcc

# Build the application.
# Leverage a cache mount to /usr/local/cargo/registry/
# for downloaded dependencies, a cache mount to /usr/local/cargo/git/db
# for git repository dependencies, and a cache mount to /app/target/ for
# compiled dependencies which will speed up subsequent builds.
# Leverage a bind mount to the src directory to avoid having to copy the
# source code into the container. Once built, copy the executable to an
# output directory before the cache mounted /app/target is unmounted.
RUN --mount=type=bind,source=src,target=src \
    --mount=type=bind,source=Cargo.toml,target=Cargo.toml \
    --mount=type=bind,source=Cargo.lock,target=Cargo.lock \
    --mount=type=bind,source=build.rs,target=build.rs \
    --mount=type=bind,source=.sqlx,target=.sqlx \
    --mount=type=bind,source=migrations,target=migrations \
    --mount=type=cache,target=/app/target/,id=rust-cache-${APP_NAME}-${TARGETPLATFORM} \
    --mount=type=cache,target=/usr/local/cargo/git/db \
    --mount=type=cache,target=/usr/local/cargo/registry/ \
    <<EOF
set -e
# xx-cargo build --locked --release --target-dir ./target
xx-cargo build --locked --target-dir ./target
cp ./target/$(xx-cargo --print-target-triple)/debug/$APP_NAME /bin/server
xx-verify /bin/server
EOF

################################################################################
# Create a new stage for running the application that contains the minimal
# runtime dependencies for the application. This often uses a different base
# image from the build stage where the necessary files are copied from the build
# stage.
#
# The example below uses the alpine image as the foundation for running the app.
# By specifying the "3.18" tag, it will use version 3.18 of alpine. If
# reproducibility is important, consider using a digest
# (e.g., alpine@sha256:664888ac9cfd28068e062c991ebcff4b4c7307dc8dd4df9e728bedde5c449d91).
FROM alpine:3.18 AS final

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/sbin/nologin" \
    --no-create-home \
    --uid "${UID}" \
    appuser
WORKDIR /app
RUN chown -R appuser /app
USER appuser

# Copy the executable from the "build" stage.
COPY --from=build /bin/server /bin/

# Expose the port that the application listens on.
# EXPOSE 8080

# What the container should run when it is started.
CMD ["/bin/server"]
README.md — 22 lines changed
@@ -3,11 +3,24 @@
p4bl0t is a simple logging bot for Discord written in Rust.

## Usage

In order to run p4bl0t, head over to your [developer
### Preparation
In order to run p4bl0t, you will need a Discord token with which your
bot will authenticate. Head over to your [developer
portal](https://discord.com/developers) on Discord’s website, and
create a bot there. Then, copy the `.env.example` file to a `.env`
file and fill in the details.
create a bot there. You will be able to get the bot’s token there.

### Docker
The easiest way to run p4bl0t is using Docker. Copy
`docker-compose.example.yml` to `docker-compose.yml` and modify the
`DISCORD_TOKEN` variable.

Then, you can simply run
```sh
docker compose up # or docker-compose on some machines
```

### Building and running it yourself
Copy the `.env.example` file to a `.env` file and fill in the details.
```sh
cp .env.example .env
emacs .env

@@ -27,7 +40,6 @@ cargo install sqlx-cli

Set up your SQLite database.
```sh
export DATABASE_URL=<your-database-url> # should be the same as in the .env file
sqlx database create
sqlx migrate run
```
build.rs — new file, 5 lines
@@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}
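This build script is what `sqlx migrate build-script` generates: by printing `cargo:rerun-if-changed=migrations`, it makes Cargo rebuild the crate whenever a migration file changes, so the `sqlx::migrate!()` macro (which embeds the `migrations/` directory at compile time) gets re-expanded. A minimal sketch of the consuming side, assuming an open `SqlitePool` (the function name is illustrative):

```rust
// Sketch only: the runtime counterpart of this build script. sqlx::migrate!()
// embeds ./migrations at compile time; .run() applies any pending migrations.
async fn run_migrations(pool: &sqlx::SqlitePool) -> Result<(), sqlx::migrate::MigrateError> {
    sqlx::migrate!().run(pool).await
}
```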
cliff.toml — new file, 84 lines
@@ -0,0 +1,84 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.

[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
    ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
    ## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
    ### {{ group | upper_first }}
    {% for commit in commits %}
        - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
    {% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# postprocessors
postprocessors = [
  # { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
  # { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
  { message = "^feat", group = "Features" },
  { message = "^fix", group = "Bug Fixes" },
  { message = "^doc", group = "Documentation" },
  { message = "^perf", group = "Performance" },
  { message = "^refactor", group = "Refactor" },
  { message = "^style", group = "Styling" },
  { message = "^test", group = "Testing" },
  { message = "^chore\\(release\\): prepare for", skip = true },
  { message = "^chore: bump (version )?to", skip = true },
  { message = "^chore\\(deps\\)", skip = true },
  { message = "^chore\\(pr\\)", skip = true },
  { message = "^chore\\(pull\\)", skip = true },
  { message = "^chore|ci", group = "Miscellaneous Tasks" },
  { body = ".*security", group = "Security" },
  { message = "^revert", group = "Revert" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# regex for matching git tags
tag_pattern = "[0-9].*"

# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42
docker-compose.example.yml — new file, 9 lines
@@ -0,0 +1,9 @@
services:
  p4bl0t:
    build:
      context: .
      target: final
    environment:
      DISCORD_TOKEN: changeme
    volumes:
      - ./p4bl0t.db:/app/p4bl0t.db
@@ -1,10 +1,8 @@
#![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]

use std::env;

use poise::serenity_prelude::{ChannelId, GuildId};
use sqlx::SqlitePool;
use tracing::error;
use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
use tracing::{error, info, debug};

pub type Result<T> = ::std::result::Result<T, sqlx::Error>;

@@ -16,22 +14,23 @@ impl Database {
    /// The Sqlite database should already exist and have its
    /// migrations already executed.
    ///
    /// # Panics
    ///
    /// Panics if the environment variable `DATABASE_URL` is not set.
    ///
    /// # Errors
    ///
    /// This function will return an error if the Sqlite pool fails to
    /// create.
    pub async fn new() -> Result<Self> {
        Ok(Self(
            SqlitePool::connect(
                &env::var("DATABASE_URL")
                    .expect("Missing enviroment variable DATABASE_URL"),
            )
            .await?,
        ))
        let url = "sqlite:p4bl0t.db";
        if !Sqlite::database_exists(url).await? {
            info!("Creating database");
            Sqlite::create_database(url).await?;
            info!("Database created");
        }
        debug!("Getting pool connection");
        let pool = SqlitePool::connect(url).await?;
        info!("Running migrations");
        sqlx::migrate!().run(&pool).await?;
        debug!("Database initialized");
        Ok(Self(pool))
    }

    /// Return from database all channels registered as loggers for a

@@ -44,7 +43,7 @@ impl Database {
        &self,
        guild_id: GuildId,
    ) -> Result<Vec<ChannelId>> {
        let guild_id = guild_id.0 as i64;
        let guild_id = guild_id.get() as i64;
        sqlx::query!(
            r#"
SELECT channel_id

@@ -63,7 +62,7 @@ WHERE guild_id = ?1"#,
        .map(|channels| {
            channels
                .iter()
                .map(|id| ChannelId(id.channel_id as u64))
                .map(|id| ChannelId::new(id.channel_id as u64))
                .collect()
        })
    }

@@ -81,8 +80,8 @@ WHERE guild_id = ?1"#,
        guild_id: GuildId,
        channel_id: ChannelId,
    ) -> Result<()> {
        let guild_id = guild_id.0 as i64;
        let channel_id = channel_id.0 as i64;
        let guild_id = guild_id.get() as i64;
        let channel_id = channel_id.get() as i64;
        let mut conn = self.0.acquire().await?;

        sqlx::query!(r#"

@@ -113,8 +112,8 @@ VALUES ( ?1, ?2 )"#,
        guild: GuildId,
        channel: ChannelId,
    ) -> Result<()> {
        let guild_id = guild.0 as i64;
        let channel_id = channel.0 as i64;
        let guild_id = guild.get() as i64;
        let channel_id = channel.get() as i64;
        let mut conn = self.0.acquire().await?;
        sqlx::query!(r#"
DELETE FROM guild_log_channels
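The database module's constructor no longer reads `DATABASE_URL`: it hardcodes `sqlite:p4bl0t.db`, creates the database file if it is missing, and runs the embedded migrations itself. A hedged usage sketch, reusing `Database`, `Result`, and `get_logging_channels` from the diff above (the guild id is a placeholder):

```rust
// Sketch only: intended call pattern for the reworked constructor. `Database`
// and `Result` are the types defined in the module above; the guild id is a
// made-up placeholder.
use poise::serenity_prelude::GuildId;

async fn example() -> Result<()> {
    // Creates sqlite:p4bl0t.db if needed, connects, and runs migrations.
    let db = Database::new().await?;

    // List the channels registered as loggers for a guild.
    let channels = db.get_logging_channels(GuildId::new(1)).await?;
    println!("{} logging channel(s) registered", channels.len());
    Ok(())
}
```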
@@ -4,19 +4,18 @@ use super::super::Result;

use super::super::error::Error as DiscordError;

use poise::serenity_prelude::{self as serenity, CreateEmbed};
use poise::serenity_prelude::{self as serenity, CreateEmbed, CreateMessage};
use tracing::{error, info};

fn message_for_everyone_mention(
    embed: &mut CreateEmbed,
fn create_embed_for_mention(
    message: &serenity::Message,
    guild_id: u64,
) {
    let author = message.author.clone();
    let message_channel = message.channel_id.0;
    embed
) -> CreateEmbed {
    let author_id = message.author.id.to_string();
    let message_channel = message.channel_id.get();
    CreateEmbed::new()
        .title("Someone mentioned everyone!")
        .field("Author", author.clone(), true)
        .field("Author", format!("<@{author_id}>"), true)
        .field("When", message.timestamp.naive_local().to_string(), true)
        .field("Channel", format!("<#{message_channel}>"), true)
        .field(

@@ -26,7 +25,7 @@ fn message_for_everyone_mention(
            message.id
        ),
        false,
    );
    )
}

/// Handle messages mentioning everyone.

@@ -54,13 +53,10 @@ pub async fn handle_everyone_mention(
        database.get_logging_channels(guild_id).await?;
    for channel in &channels {
        // Ignore result, it'll be in the bot's logger
        let embed = create_embed_for_mention(message, guild_id.get());
        let builder = CreateMessage::new().embed(embed);
        let _ = channel
            .send_message(&ctx, |m| {
                m.embed(|e| {
                    message_for_everyone_mention(e, message, guild_id.0);
                    e
                })
            })
            .send_message(&ctx, builder)
            .await
            .map_err(|e| error!("Failed to send message: {e:?}"));
    }
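The handler now follows serenity 0.12's owned-builder style (pulled in via poise 0.6): the embed is built as a `CreateEmbed` value, wrapped in a `CreateMessage`, and passed to `send_message`, instead of being mutated through nested closures. A stand-alone sketch of that pattern, with placeholder ids and field values rather than the bot's own:

```rust
// Sketch of the serenity 0.12 builder style used above; the channel id and
// field contents are placeholders. The send result is discarded, as in the
// handler.
use poise::serenity_prelude::{self as serenity, ChannelId, CreateEmbed, CreateMessage};

async fn send_example(ctx: &serenity::Context) {
    let embed = CreateEmbed::new()
        .title("Someone mentioned everyone!")
        .field("Author", "<@1234>", true);
    let builder = CreateMessage::new().embed(embed);
    let _ = ChannelId::new(1).send_message(&ctx, builder).await;
}
```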
@@ -1,9 +1,6 @@
use super::{utils::BotData, Error, Result};

use poise::{
    serenity_prelude::{self as serenity},
    Event,
};
use poise::serenity_prelude::{self as serenity, FullEvent};
use tracing::info;

mod everyone;

@@ -17,15 +14,15 @@ use everyone::handle_everyone_mention;
/// themselves.
pub async fn event_handler(
    ctx: &serenity::Context,
    event: &Event<'_>,
    event: &FullEvent,
    _framework: poise::FrameworkContext<'_, BotData, Error>,
    data: &BotData,
) -> Result {
    match event {
        Event::Ready { data_about_bot } => {
        FullEvent::Ready { data_about_bot } => {
            info!("Logged in as {}", data_about_bot.user.name);
        }
        Event::Message { new_message } => {
        FullEvent::Message { new_message } => {
            handle_everyone_mention(ctx, &data.database, new_message).await?;
        }
        _ => {}
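poise 0.6 replaces the borrowed `Event<'_>` enum with `FullEvent`, so the handler's signature and match arms change while the dispatch shape stays the same. A minimal sketch of matching on the two variants this bot handles (the logging bodies are illustrative):

```rust
// Sketch: matching poise 0.6's FullEvent, mirroring the handler above. Only
// the two variants the bot reacts to are shown; everything else is ignored.
use poise::serenity_prelude::FullEvent;
use tracing::info;

fn describe_event(event: &FullEvent) {
    match event {
        FullEvent::Ready { data_about_bot } => {
            info!("Logged in as {}", data_about_bot.user.name);
        }
        FullEvent::Message { new_message } => {
            info!("Message received in channel {}", new_message.channel_id);
        }
        _ => {}
    }
}
```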
@@ -1,9 +1,9 @@
mod commands;
pub mod error;
mod events;
pub mod utils;
pub mod error;

use poise::FrameworkBuilder;
use poise::serenity_prelude::ClientBuilder;
use utils::serenity;

use commands::logging;

@@ -18,8 +18,10 @@ pub type Result = ::std::result::Result<(), Error>;
/// # Panics
///
/// Panics if the environment `DISCORD_TOKEN` is unavailable.
pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
    poise::Framework::builder()
pub fn make_bot() -> ClientBuilder {
    let intents = serenity::GatewayIntents::non_privileged();
    let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
    let framework = poise::Framework::builder()
        .options(poise::FrameworkOptions {
            commands: vec![logging()],
            event_handler: |ctx, event, framework, data| {

@@ -27,8 +29,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
            },
            ..Default::default()
        })
        .token(std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN"))
        .intents(serenity::GatewayIntents::non_privileged())
        .setup(|ctx, _ready, framework| {
            Box::pin(async move {
                poise::builtins::register_globally(

@@ -39,4 +39,6 @@ pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
                Ok(BotData::new().await?)
            })
        })
        .build();
    ClientBuilder::new(token, intents).framework(framework)
}
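With poise 0.6, `make_bot()` no longer returns a `FrameworkBuilder`: the framework is built on its own and attached to serenity's `ClientBuilder`, which now owns the token and gateway intents. Awaiting the returned builder yields the `Client`, which is exactly what the updated `main.rs` below does; a hedged sketch of that calling side:

```rust
// Sketch only: driving the new ClientBuilder-based setup, as main.rs does
// after this change. Errors are collapsed into a boxed error for brevity.
async fn run() -> Result<(), Box<dyn std::error::Error>> {
    // make_bot() returns a serenity ClientBuilder; awaiting it builds the Client.
    let mut bot = discord::make_bot().await?;
    bot.start().await?;
    Ok(())
}
```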
@@ -1,19 +1,18 @@
#![warn(clippy::style, clippy::pedantic)]

mod utils;
mod db;
mod discord;
mod utils;

use std::error::Error;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    dotenvy::dotenv()?;
    color_eyre::install()?;
    utils::setup_logging();
    color_eyre::install()?;

    let bot = discord::make_bot();
    bot.run().await?;
    let mut bot = discord::make_bot().await?;
    bot.start().await?;

    Ok(())
}