Compare commits: main ... 4e9428c376

2 commits: 4e9428c376, 5a084b5bf0
LICENSE (new file, +11)

@@ -0,0 +1,11 @@
+Copyright 2025 Micheal Smith
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Redistributions in any form must retain this license verbatim. No additional licensing terms, including but not limited to the GNU General Public License, may be imposed on the original or modified work.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
README.md (new file, +22)

@@ -0,0 +1,22 @@
+# Robotnik - A Basic LLM Capable IRC Bot.
+
+This is an IRC bot. The name is based on a fictional video game villain.
+Currently it supports any LLM that uses the OpenAI style of interface. They
+can be selected via command line options, environment variables, or a configuration
+file. There is a [configuration file](config.toml) that *should* contain all available options
+currently.
+
+## Some supported but ~~possibly~~ *mostly* untested LLMs:
+
+| Name       | Model             | Base URL                                  | Tested |
+|------------|-------------------|-------------------------------------------|--------|
+| OpenAI     | gpt-5             | https://api.openai.com/v1                 | no     |
+| Deepseek   | deepseek-chat     | https://api.deepseek.com/v1               | yes    |
+| Anthropic  | claude-sonnet-4-0 | https://api.anthropic.com/v1              | no     |
+| Gemini     | gemini-2.5-turbo  | https://generativelanguage.googleapis.com | no     |
+| OpenRouter | some-model        | https://api.openrouter.ai/v1              | no     |
+
+## Further reading...
+
+There should be a man page that might be useful. Otherwise the -h/--help
+switch should hopefully suffice.
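The README's point about selecting the backend via command-line options, environment variables, or the config file corresponds to the clap-derived argument handling in src/setup.rs. Below is a minimal stand-alone sketch of that pattern; the struct name, flag names, and environment-variable names are illustrative only and are not taken from the repository.

```rust
use clap::Parser;

/// Hypothetical selection flags; Robotnik's real Args struct lives in src/setup.rs.
#[derive(Debug, Parser)]
struct LlmSelection {
    /// Model name, e.g. "deepseek-chat"; falls back to the LLM_MODEL env var.
    /// (Reading env vars this way needs clap's "env" cargo feature.)
    #[arg(long, env = "LLM_MODEL")]
    model: Option<String>,

    /// Base URL of an OpenAI-style API; falls back to LLM_BASE_URL.
    #[arg(long, env = "LLM_BASE_URL")]
    base_url: Option<String>,
}

fn main() {
    let args = LlmSelection::parse();
    // Anything still None here would be filled in from the config file instead.
    println!("model = {:?}, base-url = {:?}", args.model, args.base_url);
}
```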
config.toml

@@ -2,6 +2,10 @@
 
 api-key = "<YOUR-KEY>"
 base-url = "api.openai.com"
+chroot-dir = "/home/bot/root"
+
+# If using chroot (recommended) then this will be relative.
+command-path = "/cmds"
 
 # If you don't already know the model name you can generally find a listing
 # on the models API pages.
src/main.rs (16 lines changed)

@@ -3,6 +3,7 @@ use color_eyre::{
     eyre::WrapErr,
 };
 use human_panic::setup_panic;
+use std::os::unix::fs;
 use tracing::{
     Level,
     info,
@@ -10,11 +11,11 @@ use tracing::{
 use tracing_subscriber::FmtSubscriber;
 
 mod chat;
-mod command;
+mod commands;
 mod qna;
 mod setup;
 
-const DEFAULT_INSTRUCT: &'static str =
+const DEFAULT_INSTRUCT: &str =
     "You are a shady, yet helpful IRC bot. You try to give responses that can
 be sent in a single IRC response according to the specification.";
 
@@ -36,6 +37,13 @@ async fn main() -> Result<()> {
     let settings = setup::init().await.wrap_err("Failed to initialize.")?;
     let config = settings.config;
 
+    // chroot if applicable.
+    if let Ok(chroot_path) = config.get_string("chroot-dir") {
+        fs::chroot(&chroot_path)
+            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path.to_string()))?;
+        std::env::set_current_dir("/").wrap_err("Couldn't change directory after chroot.")?;
+    }
+
     let handle = qna::new(
         config.get_string("api-key").wrap_err("API missing.")?,
         config
@@ -44,7 +52,9 @@ async fn main() -> Result<()> {
         config
             .get_string("model")
             .wrap_err("model string missing.")?,
-        config.get_string("instruct").unwrap_or_else(|_| DEFAULT_INSTRUCT.to_string()),
+        config
+            .get_string("instruct")
+            .unwrap_or_else(|_| DEFAULT_INSTRUCT.to_string()),
     )
     .wrap_err("Couldn't initialize LLM handle.")?;
     let mut c = chat::new(&config, &handle).await?;
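The chroot block added to main.rs pairs with the config comment that command-path becomes relative once a chroot is set. A small stand-alone sketch of that behaviour, using the example paths from this diff, is below; note that chroot(2) requires root privileges, so it only succeeds when run as a suitably privileged user.

```rust
use std::os::unix::fs; // fs::chroot is Unix-only
use std::path::Path;

fn main() -> std::io::Result<()> {
    let chroot_dir = "/home/bot/root"; // example value from config.toml in this diff
    let command_path = "/cmds";        // resolved inside the new root

    // Confine the process to chroot_dir; fails without CAP_SYS_CHROOT / root.
    fs::chroot(chroot_dir)?;
    // The old working directory is now outside the new root, so move to "/",
    // exactly as the new code in main.rs does.
    std::env::set_current_dir("/")?;

    // From the bot's point of view, "/cmds" now maps to /home/bot/root/cmds on the host.
    println!("command files are read from {}", Path::new(command_path).display());
    Ok(())
}
```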
src/setup.rs (22 lines changed)

@@ -1,15 +1,9 @@
 use clap::Parser;
-use color_eyre::{
-    Result,
-    eyre::WrapErr,
-};
+use color_eyre::{Result, eyre::WrapErr};
 use config::Config;
 use directories::ProjectDirs;
 use std::path::PathBuf;
-use tracing::{
-    info,
-    instrument,
-};
+use tracing::{info, instrument};
 
 // TODO: use [clap(long, short, help_heading = Some(section))]
 #[derive(Clone, Debug, Parser)]
@@ -23,6 +17,14 @@ pub(crate) struct Args {
     /// Base URL for the LLM API to use.
     pub(crate) base_url: Option<String>,
 
+    /// Directory to use for chroot (recommended).
+    #[arg(long)]
+    pub(crate) chroot_dir: Option<String>,
+
+    /// Root directory for file based command structure.
+    #[arg(long)]
+    pub(crate) command_dir: Option<String>,
+
     #[arg(long)]
     /// Instructions to the model on how to behave.
     pub(crate) intruct: Option<String>,
@@ -30,7 +32,7 @@ pub(crate) struct Args {
     #[arg(long)]
     pub(crate) model: Option<String>,
 
-    #[arg(long)]
+    #[arg(long = "channel")]
     /// List of IRC channels to join.
     pub(crate) channels: Option<Vec<String>>,
 
@@ -93,6 +95,8 @@ pub async fn init() -> Result<Setup> {
         // but a derive macro could do this a bit better if this becomes too large.
         .set_override_option("api-key", args.api_key.clone())?
         .set_override_option("base-url", args.base_url.clone())?
+        .set_override_option("chroot-dir", args.chroot_dir.clone())?
+        .set_override_option("command-path", args.command_dir.clone())?
         .set_override_option("model", args.model.clone())?
         .set_override_option("instruct", args.model.clone())?
         .set_override_option("channels", args.channels.clone())?
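For context on the two new set_override_option calls: the config crate builder applies sources in order, so values passed on the command line override anything read from a file or the environment, while a None simply leaves the key untouched. The sketch below shows that layering in isolation; the file name and environment prefix are placeholder assumptions, not the repository's actual setup code.

```rust
use config::{Config, Environment, File};

fn main() -> Result<(), config::ConfigError> {
    // Stand-ins for values parsed from the CLI; in setup.rs these come from Args.
    let cli_chroot_dir: Option<String> = Some("/home/bot/root".into());
    let cli_command_dir: Option<String> = None; // no flag given: keep the file's value

    let config = Config::builder()
        // Lowest precedence: an optional config file (name is a placeholder).
        .add_source(File::with_name("config").required(false))
        // Then environment variables such as ROBOTNIK_MODEL (prefix is assumed).
        .add_source(Environment::with_prefix("ROBOTNIK"))
        // Highest precedence: explicit CLI overrides.
        .set_override_option("chroot-dir", cli_chroot_dir)?
        .set_override_option("command-path", cli_command_dir)?
        .build()?;

    println!("chroot-dir = {:?}", config.get_string("chroot-dir"));
    println!("command-path = {:?}", config.get_string("command-path"));
    Ok(())
}
```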