Compare commits

..

3 Commits

SHA1        Message                                  Date
58bd07d556  chore: remove unnecessary step           2023-12-19 20:09:05 +01:00
            Default docker image in Gitea runner now ships with Docker
            All checks were successful: Create and publish a Docker image / build-and-push-image (push), successful in 1m29s
ad014dd1ae  chore: simplify Dockerfile               2023-12-19 20:09:05 +01:00
60a81f66a8  feat: Enable Docker deployment and CD    2023-11-26 04:22:56 +01:00
            Closes #8, partially addresses #6
            All checks were successful: Create and publish a Docker image / build-and-push-image (push), successful in 8m1s
21 changed files with 552 additions and 891 deletions

.dockerignore
@@ -1,32 +1,3 @@
-# Include any files or directories that you don't want to be copied to your
-# container here (e.g., local build artifacts, temporary files, etc.).
-#
-# For more help, visit the .dockerignore file reference guide at
-# https://docs.docker.com/go/build-context-dockerignore/
-
-**/.DS_Store
-**/.classpath
-**/.dockerignore
-**/.env
-**/.git
-**/.gitignore
-**/.project
-**/.settings
-**/.toolstarget
-**/.vs
-**/.vscode
-**/*.*proj.user
-**/*.dbmdl
-**/*.jfm
-**/charts
-**/docker-compose*
-**/compose*
-**/Dockerfile*
-**/node_modules
-**/npm-debug.log
-**/secrets.dev.yaml
-**/values.dev.yaml
-/bin
-/target
-LICENSE
-README.md
+/assets
+/.sqlx
+/.env.example

.env.example
@@ -1 +1,5 @@
 DISCORD_TOKEN=changeme
+
+# Only useful when developing locally. Do not change it when deploying
+# with Docker.
+DATABASE_URL=sqlite:p4bl0t.db

@@ -0,0 +1,40 @@
name: Create and publish a Docker image

on:
  push:
    branches: ['main', 'feature/docker']

env:
  REGISTRY: labs.phundrak.com
  IMAGE_NAME: ${{ gitea.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Log in to the Container registry
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      - name: Build and push Docker image
        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
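
Once this workflow succeeds, the image should be pullable from the labs.phundrak.com registry under the repository's own name. A minimal sketch of running it, assuming the repository lives under the phundrak owner and that metadata-action tagged the build with the branch name (both assumptions, not stated in this diff):

```sh
# Hypothetical image reference: the owner (phundrak) and the :main tag are
# assumptions, derived from REGISTRY and IMAGE_NAME (gitea.repository) above.
docker pull labs.phundrak.com/phundrak/p4bl0t:main
docker run --rm -e DISCORD_TOKEN=changeme labs.phundrak.com/phundrak/p4bl0t:main
```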

.gitignore
@@ -1,3 +1,4 @@
 /target
 /.env
 *.db
+/.sqlx/


.sqlx/query-5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03.json
@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\nINSERT INTO guild_log_channels (guild_id, channel_id)\nVALUES ( ?1, ?2 )",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "5b44991d1514160fa00572e398f0577ad44f839a0470f9eeb89da8b5e77f0e03"
}


.sqlx/query-8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab.json
@@ -1,20 +0,0 @@
{
"db_name": "SQLite",
"query": "\nSELECT channel_id\nFROM guild_log_channels\nWHERE guild_id = ?1",
"describe": {
"columns": [
{
"name": "channel_id",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "8444f7b7452a5ace6352aef943274f8a345a958257d896c7658b7700557959ab"
}


.sqlx/query-d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e.json
@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\nDELETE FROM guild_log_channels\nWHERE guild_id = ?1 AND channel_id = ?2",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "d6e9f422d6ae29a00658f55165018119d1e13d407266440415dfcc17a97ba00e"
}

CHANGELOG.md
@@ -1,34 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
## [unreleased]
### Documentation
- Add Changelog
## [1.0.1] - 2024-01-18
### Features
- Dockerize p4bl0t
### Miscellaneous Tasks
- Update bot framework
### Refactor
- Simplify code, better organize it, and comment it
## [1.0.0] - 2023-11-23
### Features
- Add a channel as a logging channel
- Add listing logger channels in a guild
- Unset a channel as a logger
- Send in logger channels mentions to everyone
<!-- generated by git-cliff -->

Cargo.lock (921 changes, generated)
File diff suppressed because it is too large

Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "p4bl0t"
-version = "1.0.1"
+version = "1.0.0"
 edition = "2021"
 authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
 license-file = "LICENSE.md"
@@ -10,12 +10,12 @@ homepage = "https://github.com/phundrak/p4bl0t"
 repository = "https://github.com/phundrak/p4bl0t"
 keywords = ["discord", "bot", "logging"]
 publish = false
-build = "build.rs"

 [dependencies]
 color-eyre = "0.6.2"
-poise = { version = "0.6.1" }
-sqlx = { version = "0.7.3", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
+dotenvy = "0.15.7"
+poise = { version = "0.5.7" }
+sqlx = { version = "0.7.2", features = ["sqlite", "tls-rustls", "runtime-tokio-rustls"] }
 tokio = { version = "1.34.0", features = ["macros", "rt-multi-thread"] }
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"

Dockerfile
@@ -1,75 +1,30 @@
-# syntax=docker/dockerfile:1
-
-# Comments are provided throughout this file to help you get started.
-# If you need more help, visit the Dockerfile reference guide at
-# https://docs.docker.com/go/dockerfile-reference/
-
 ARG RUST_VERSION=1.73.0
-ARG APP_NAME=p4bl0t
-
-################################################################################
-# xx is a helper for cross-compilation.
-# See https://github.com/tonistiigi/xx/ for more information.
-FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.3.0 AS xx
-
-################################################################################
-# Create a stage for building the application.
-FROM --platform=$BUILDPLATFORM rust:${RUST_VERSION}-alpine AS build
-ARG APP_NAME
+FROM rust:${RUST_VERSION}-slim-bullseye AS build
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    cargo install sqlx-cli --no-default-features --features rustls,sqlite && \
+    cp /usr/local/cargo/bin/sqlx /bin/sqlx
+ENV DATABASE_URL=sqlite:/var/p4bl0t.db
 WORKDIR /app

-# Copy cross compilation utilities from the xx stage.
-COPY --from=xx / /
-
-# Install host build dependencies.
-RUN apk add --no-cache clang lld musl-dev git file
-
-# This is the architecture you're building for, which is passed in by the builder.
-# Placing it here allows the previous steps to be cached across architectures.
-ARG TARGETPLATFORM
-
-# Install cross compilation build dependencies.
-RUN xx-apk add --no-cache musl-dev gcc
-
-# Build the application.
-# Leverage a cache mount to /usr/local/cargo/registry/
-# for downloaded dependencies, a cache mount to /usr/local/cargo/git/db
-# for git repository dependencies, and a cache mount to /app/target/ for
-# compiled dependencies which will speed up subsequent builds.
-# Leverage a bind mount to the src directory to avoid having to copy the
-# source code into the container. Once built, copy the executable to an
-# output directory before the cache mounted /app/target is unmounted.
 RUN --mount=type=bind,source=src,target=src \
     --mount=type=bind,source=Cargo.toml,target=Cargo.toml \
     --mount=type=bind,source=Cargo.lock,target=Cargo.lock \
-    --mount=type=bind,source=build.rs,target=build.rs \
-    --mount=type=bind,source=.sqlx,target=.sqlx \
     --mount=type=bind,source=migrations,target=migrations \
-    --mount=type=cache,target=/app/target/,id=rust-cache-${APP_NAME}-${TARGETPLATFORM} \
-    --mount=type=cache,target=/usr/local/cargo/git/db \
-    --mount=type=cache,target=/usr/local/cargo/registry/ \
+    --mount=type=cache,target=/app/target/ \
+    --mount=type=cache,target=/usr/local/cargo/registry \
     <<EOF
 set -e
-# xx-cargo build --locked --release --target-dir ./target
-xx-cargo build --locked --target-dir ./target
-cp ./target/$(xx-cargo --print-target-triple)/debug/$APP_NAME /bin/server
-xx-verify /bin/server
+sqlx database create
+sqlx migrate run
+cargo install --locked --path .
 EOF

-################################################################################
-# Create a new stage for running the application that contains the minimal
-# runtime dependencies for the application. This often uses a different base
-# image from the build stage where the necessary files are copied from the build
-# stage.
-#
-# The example below uses the alpine image as the foundation for running the app.
-# By specifying the "3.18" tag, it will use version 3.18 of alpine. If
-# reproducability is important, consider using a digest
-# (e.g., alpine@sha256:664888ac9cfd28068e062c991ebcff4b4c7307dc8dd4df9e728bedde5c449d91).
-FROM alpine:3.18 AS final
-
-# Create a non-privileged user that the app will run under.
-# See https://docs.docker.com/go/dockerfile-user-best-practices/
+FROM debian:bullseye-slim AS final
+RUN apt-get update && apt-get install -qqy ca-certificates
 ARG UID=10001
 RUN adduser \
     --disabled-password \
@@ -79,15 +34,12 @@ RUN adduser \
     --no-create-home \
     --uid "${UID}" \
     appuser
+WORKDIR /app
+RUN chown -R appuser /app
 USER appuser

-# Copy the executable from the "build" stage.
-COPY --from=build /bin/server /bin/
-
-# Expose the port that the application listens on.
-# EXPOSE 8080
-
-# What the container should run when it is started.
-CMD ["/bin/server"]
+ENV DATABASE_URL=sqlite:/var/p4bl0t.db
+ENV DISCORD_TOKEN=changeme
+COPY --from=build /usr/local/cargo/bin/p4bl0t /bin
+COPY --chown=appuser --from=build /var/p4bl0t.db /var/p4bl0t.db
+CMD [ "p4bl0t" ]

README.md
@@ -3,24 +3,11 @@
 p4bl0t is a simple logging bot for Discord written in Rust.

 ## Usage

-### Preparation
-
-In order to run p4bl0t, you will need a Discord token with which your
-bot will authenticate. Head over to your [developer
+In order to run p4bl0t, head over to your [developer
 portal](https://discord.com/developers) on Discord's website, and
-create a bot there. You will be able to get the bot's token there.
-
-### Docker
-
-The easiest way to run p4bl0t is using Docker. Copy
-`docker-compose.example.yml` to `docker-compose.yml` and modify the
-`DISCORD_TOKEN` variable.
-
-Then, you can simply run
-```sh
-docker compose up # or docker-compose on some machines
-```
-
-### Building and running it yourself
-
-Copy the `.env.example` file to a `.env` file and fill in the details.
+create a bot there. Then, copy the `.env.example` file to a `.env`
+file and fill in the details.

 ```sh
 cp .env.example .env
 emacs .env
@@ -40,6 +27,7 @@ cargo install sqlx-cli
 Setup your SQLite database.

 ```sh
+export DATABASE_URL=<your-database-url> # should be the same as in the .env file
 sqlx database create
 sqlx migrate run
 ```

build.rs
@@ -1,5 +0,0 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}

cliff.toml
@@ -1,84 +0,0 @@
# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# postprocessors
postprocessors = [
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
# { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features" },
{ message = "^fix", group = "Bug Fixes" },
{ message = "^doc", group = "Documentation" },
{ message = "^perf", group = "Performance" },
{ message = "^refactor", group = "Refactor" },
{ message = "^style", group = "Styling" },
{ message = "^test", group = "Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore: bump (version )?to", skip = true },
{ message = "^chore\\(deps\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore|ci", group = "Miscellaneous Tasks" },
{ body = ".*security", group = "Security" },
{ message = "^revert", group = "Revert" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# regex for matching git tags
tag_pattern = "[0-9].*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42

docker-compose.example.yml
@@ -1,9 +0,0 @@
services:
  p4bl0t:
    build:
      context: .
      target: final
    environment:
      DISCORD_TOKEN: changeme
    volumes:
      - ./p4bl0t.db:/app/p4bl0t.db

docker-compose.yml (new file)
@@ -0,0 +1,8 @@
version: '3'

services:
  p4bl0t:
    env_file: .env
    build:
      context: .
      target: final
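
For reference, bringing the bot up with this compose file would look something like the sketch below; it assumes the `.env` file provides `DISCORD_TOKEN` (and, per `.env.example`, `DATABASE_URL`), and is not a command taken from the repository itself:

```sh
# Copy the example environment file, fill in the Discord token,
# then build and start the p4bl0t service defined in docker-compose.yml.
cp .env.example .env
docker compose up --build -d  # or docker-compose on some machines
```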

@@ -1,8 +1,10 @@
 #![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss)]

+use std::env;
+
 use poise::serenity_prelude::{ChannelId, GuildId};
-use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
-use tracing::{error, info, debug};
+use sqlx::SqlitePool;
+use tracing::{error, info};

 pub type Result<T> = ::std::result::Result<T, sqlx::Error>;
@@ -14,23 +16,20 @@ impl Database {
     /// The Sqlite database should already exist and have its
     /// migrations already executed.
     ///
+    /// # Panics
+    ///
+    /// Panics if the environment variable `DATABASE_URL` is not set.
+    ///
     /// # Errors
     ///
     /// This function will return an error if the Sqlite pool fails to
     /// create.
+    // TODO: Create the database if it doesn't exist already and run migrations
     pub async fn new() -> Result<Self> {
-        let url = "sqlite:p4bl0t.db";
-        if !Sqlite::database_exists(url).await? {
-            info!("Creating database");
-            Sqlite::create_database(url).await?;
-            info!("Database created");
-        }
-        debug!("Getting pool connection");
-        let pool = SqlitePool::connect(url).await?;
-        info!("Running migrations");
-        sqlx::migrate!().run(&pool).await?;
-        debug!("Database initialized");
-        Ok(Self(pool))
+        let db_url = env::var("DATABASE_URL")
+            .expect("Missing enviroment variable DATABASE_URL");
+        info!("Connecting to database located at {db_url}");
+        Ok(Self(SqlitePool::connect(&db_url).await?))
     }

     /// Return from database all channels registered as loggers for a
@@ -43,7 +42,7 @@
         &self,
         guild_id: GuildId,
     ) -> Result<Vec<ChannelId>> {
-        let guild_id = guild_id.get() as i64;
+        let guild_id = guild_id.0 as i64;
         sqlx::query!(
             r#"
 SELECT channel_id
@@ -62,7 +61,7 @@ WHERE guild_id = ?1"#,
         .map(|channels| {
             channels
                 .iter()
-                .map(|id| ChannelId::new(id.channel_id as u64))
+                .map(|id| ChannelId(id.channel_id as u64))
                 .collect()
         })
     }
@@ -80,8 +79,8 @@ WHERE guild_id = ?1"#,
         guild_id: GuildId,
         channel_id: ChannelId,
     ) -> Result<()> {
-        let guild_id = guild_id.get() as i64;
-        let channel_id = channel_id.get() as i64;
+        let guild_id = guild_id.0 as i64;
+        let channel_id = channel_id.0 as i64;
         let mut conn = self.0.acquire().await?;
         sqlx::query!(r#"
@@ -112,8 +111,8 @@ VALUES ( ?1, ?2 )"#,
         guild: GuildId,
         channel: ChannelId,
     ) -> Result<()> {
-        let guild_id = guild.get() as i64;
-        let channel_id = channel.get() as i64;
+        let guild_id = guild.0 as i64;
+        let channel_id = channel.0 as i64;
         let mut conn = self.0.acquire().await?;
         sqlx::query!(r#"
 DELETE FROM guild_log_channels

@@ -4,18 +4,19 @@ use super::super::Result;
 use super::super::error::Error as DiscordError;

-use poise::serenity_prelude::{self as serenity, CreateEmbed, CreateMessage};
+use poise::serenity_prelude::{self as serenity, CreateEmbed};
 use tracing::{error, info};

-fn create_embed_for_mention(
+fn message_for_everyone_mention(
+    embed: &mut CreateEmbed,
     message: &serenity::Message,
     guild_id: u64,
-) -> CreateEmbed {
-    let author_id = message.author.id.to_string();
-    let message_channel = message.channel_id.get();
-    CreateEmbed::new()
+) {
+    let author = message.author.clone();
+    let message_channel = message.channel_id.0;
+    embed
         .title("Someone mentioned everyone!")
-        .field("Author", format!("<@{author_id}>"), true)
+        .field("Author", author.clone(), true)
         .field("When", message.timestamp.naive_local().to_string(), true)
         .field("Channel", format!("<#{message_channel}>"), true)
         .field(
@@ -25,7 +26,7 @@ fn create_embed_for_mention(
                 message.id
             ),
             false,
-        )
+        );
 }

 /// Handle messages mentioning everyone.
@@ -53,10 +54,13 @@
         database.get_logging_channels(guild_id).await?;
     for channel in &channels {
         // Ignore result, it'll be in the bot's logger
-        let embed = create_embed_for_mention(message, guild_id.get());
-        let builder = CreateMessage::new().embed(embed);
         let _ = channel
-            .send_message(&ctx, builder)
+            .send_message(&ctx, |m| {
+                m.embed(|e| {
+                    message_for_everyone_mention(e, message, guild_id.0);
+                    e
+                })
+            })
             .await
             .map_err(|e| error!("Failed to send message: {e:?}"));
     }

@@ -1,6 +1,9 @@
 use super::{utils::BotData, Error, Result};

-use poise::serenity_prelude::{self as serenity, FullEvent};
+use poise::{
+    serenity_prelude::{self as serenity},
+    Event,
+};
 use tracing::info;

 mod everyone;
@@ -14,15 +17,15 @@ use everyone::handle_everyone_mention;
 /// themselves.
 pub async fn event_handler(
     ctx: &serenity::Context,
-    event: &FullEvent,
+    event: &Event<'_>,
     _framework: poise::FrameworkContext<'_, BotData, Error>,
     data: &BotData,
 ) -> Result {
     match event {
-        FullEvent::Ready { data_about_bot } => {
+        Event::Ready { data_about_bot } => {
             info!("Logged in as {}", data_about_bot.user.name);
         }
-        FullEvent::Message { new_message } => {
+        Event::Message { new_message } => {
             handle_everyone_mention(ctx, &data.database, new_message).await?;
         }
         _ => {}

@@ -3,7 +3,8 @@ pub mod error;
 mod events;
 pub mod utils;

-use poise::serenity_prelude::ClientBuilder;
+use poise::FrameworkBuilder;
+use tracing::info;
 use utils::serenity;

 use commands::logging;
@@ -18,10 +19,11 @@ pub type Result = ::std::result::Result<(), Error>;
 /// # Panics
 ///
 /// Panics if the environment `DISCORD_TOKEN` is unavailable.
-pub fn make_bot() -> ClientBuilder {
-    let intents = serenity::GatewayIntents::non_privileged();
-    let token = std::env::var("DISCORD_TOKEN").expect("missing DISCORD_TOKEN");
-    let framework = poise::Framework::builder()
+pub fn make_bot() -> FrameworkBuilder<BotData, Error> {
+    match std::env::var("DISCORD_TOKEN") {
+        Ok(token) => {
+            info!("Launching bot with token {token}");
+            poise::Framework::builder()
         .options(poise::FrameworkOptions {
             commands: vec![logging()],
             event_handler: |ctx, event, framework, data| {
@@ -29,6 +31,8 @@ pub fn make_bot() -> ClientBuilder {
             },
             ..Default::default()
         })
+        .token(token)
+        .intents(serenity::GatewayIntents::non_privileged())
         .setup(|ctx, _ready, framework| {
             Box::pin(async move {
                 poise::builtins::register_globally(
@@ -39,6 +43,7 @@ pub fn make_bot() -> ClientBuilder {
                 Ok(BotData::new().await?)
             })
         })
-        .build();
-    ClientBuilder::new(token, intents).framework(framework)
+        }
+        Err(_) => panic!("DISCORD_TOKEN environment variable is missing."),
+    }
 }

@@ -6,13 +6,20 @@ mod utils;
 use std::error::Error;

+use tracing::info;
+
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
+    println!("Setting logging up");
     utils::setup_logging();
+    info!("Setting up color_eyre");
     color_eyre::install()?;
-    let mut bot = discord::make_bot().await?;
-    bot.start().await?;
+    info!("Reading from dotenv");
+    let _ =
+        dotenvy::dotenv().map_err(|_| info!("No .env file found, skipping"));
+    info!("Launching bot");
+    let bot = discord::make_bot();
+    bot.run().await?;
     Ok(())
 }