Added README and LICENSE.

Micheal Smith
2025-08-11 23:37:18 -05:00
parent b86e46fe00
commit 5f30fdbf77
9 changed files with 182 additions and 63 deletions

Cargo.lock (generated)

@@ -796,9 +796,9 @@ dependencies = [
 [[package]]
 name = "genai"
-version = "0.4.0-alpha.9"
+version = "0.4.0-alpha.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21d0635fdb38ec31765faf40342114085af28c70afc3cc86cb20d5be7b3c5eac"
+checksum = "2b3abc148e5048f76d4472b277015efac351b08a5bdfcbcd72a37ced67e4c538"
 dependencies = [
  "bytes",
  "derive_more 2.0.1",
@@ -1408,26 +1408,6 @@ dependencies = [
  "winapi",
 ]
-
-[[package]]
-name = "nugem"
-version = "0.1.0"
-dependencies = [
- "better-panic",
- "clap",
- "color-eyre",
- "config",
- "directories",
- "dotenvy_macro",
- "futures",
- "genai",
- "human-panic",
- "irc",
- "tokio",
- "tracing",
- "tracing-subscriber",
- "url",
-]
 
 [[package]]
 name = "num-conv"
 version = "0.1.0"
@@ -1930,6 +1910,25 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
+
+[[package]]
+name = "robotnik"
+version = "0.1.0"
+dependencies = [
+ "better-panic",
+ "clap",
+ "color-eyre",
+ "config",
+ "directories",
+ "dotenvy_macro",
+ "futures",
+ "genai",
+ "human-panic",
+ "irc",
+ "tokio",
+ "tracing",
+ "tracing-subscriber",
+]
 
 [[package]]
 name = "ron"
 version = "0.8.1"

Cargo.toml

@@ -13,7 +13,7 @@ directories = "6.0"
dotenvy_macro = "0.15" dotenvy_macro = "0.15"
futures = "0.3" futures = "0.3"
human-panic = "2.0" human-panic = "2.0"
genai = "0.4.0-alpha.9" genai = "0.4.0-alpha.11"
irc = "1.1" irc = "1.1"
tokio = { version = "1", features = [ "full" ] } tokio = { version = "1", features = [ "full" ] }
tracing = "0.1" tracing = "0.1"

LICENSE (new file)

@@ -0,0 +1,11 @@
Copyright 2025 Micheal Smith
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Redistributions in any form must retain this license verbatim. No additional licensing terms, including but not limited to the GNU General Public License, may be imposed on the original or modified work.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

README.md (new file)

@@ -0,0 +1,22 @@
# Robotnik - A Basic LLM-Capable IRC Bot
This is an IRC bot that can talk to an LLM. The name comes from a fictional
video game villain. Currently it supports any LLM that exposes an OpenAI-style
API. The model can be selected via command-line options, environment
variables, or a configuration file. The included
[configuration file](config.toml) *should* list all currently available
options.
## Some supported but ~~possibly~~ *mostly* untested LLMs:
| Name | Model | Base URL | Tested |
|------------|-------------------|-------------------------------------------|---------|
| OpenAI | gpt-5 | https://api.openai.com/v1 | no |
| Deepseek | deepseek-chat | https://api.deepseek.com/v1 | yes |
| Anthropic | claude-sonnet-4-0 | https://api.anthropic.com/v1 | no |
| Gemini | gemini-2.5-turbo | https://generativelanguage.googleapis.com | no |
| OpenRouter | some-model | https://api.openrouter.ai/v1 | no |
## Further reading...
There is a man page (robotnik.1) that might be useful. Otherwise the
`-h`/`--help` switch should hopefully suffice.
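For orientation, the settings this commit reads (`port`, `channels`, `api-key`, `base-url`, `model`, plus the optional `chroot-dir` and `command-path`) suggest a config.toml roughly like the sketch below. The key names come from the code; every value is a placeholder:

```toml
# Hypothetical sketch of config.toml; keys mirror what the code reads,
# values are placeholders.
port = 6697
channels = ["#robotnik"]
api-key = "sk-placeholder"
base-url = "https://api.deepseek.com/v1"
model = "deepseek-chat"

# Optional: chroot before connecting, and a root path for external commands.
chroot-dir = "/var/empty"
command-path = "/usr/local/libexec/robotnik"
```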

robotnik.1 (new file)

@@ -0,0 +1,42 @@
.Dd $Mdocdate$
.Dt ROBOTNIK 1
.Os
.Sh NAME
.Nm robotnik
.Nd simple IRC bot that, among other things, uses the OpenAI API
.\" .Sh LIBRARY
.\" For sections 2, 3, and 9 only.
.\" Not used in OpenBSD.
.Sh SYNOPSIS
.Nm
.Op Fl options
.Ar
.Sh DESCRIPTION
The
.Nm
utility connects to an IRC server and answers channel messages using an LLM back end.
.\" .Sh CONTEXT
.\" For section 9 functions only.
.\" .Sh IMPLEMENTATION NOTES
.\" Not used in OpenBSD.
.\" .Sh RETURN VALUES
.\" For sections 2, 3, and 9 function return values only.
.\" .Sh ENVIRONMENT
.\" For sections 1, 6, 7, and 8 only.
.\" .Sh FILES
.\" .Sh EXIT STATUS
.\" For sections 1, 6, and 8 only.
.\" .Sh EXAMPLES
.\" .Sh DIAGNOSTICS
.\" For sections 1, 4, 6, 7, 8, and 9 printf/stderr messages only.
.\" .Sh ERRORS
.\" For sections 2, 3, 4, and 9 errno settings only.
.\" .Sh SEE ALSO
.\" .Xr foobar 1
.\" .Sh STANDARDS
.\" .Sh HISTORY
.\" .Sh AUTHORS
.\" .Sh CAVEATS
.\" .Sh BUGS
.\" .Sh SECURITY CONSIDERATIONS
.\" Not used in OpenBSD.

View File

@@ -6,7 +6,10 @@ use color_eyre::{
     },
 };
 // Lots of namespace confusion potential
-use crate::qna::LLMHandle;
+use crate::{
+    commands,
+    qna::LLMHandle,
+};
 use config::Config as MainConfig;
 use futures::StreamExt;
 use irc::client::prelude::{
@@ -28,12 +31,14 @@ pub struct Chat {
     // Need: owners, channels, username, nick, server, password
     #[instrument]
-    pub async fn new(settings: &MainConfig, handle: &LLMHandle) -> Result<Chat> {
+    pub async fn new(
+        settings: &MainConfig,
+        handle: &LLMHandle,
+    ) -> Result<Chat> {
         // Going to just assign and let the irc library handle errors for now, and
         // add my own checking if necessary.
         let port: u16 = settings.get("port")?;
-        let channels: Vec<String> = settings.get("channels")
-            .wrap_err("No channels provided.")?;
+        let channels: Vec<String> = settings.get("channels").wrap_err("No channels provided.")?;
         event!(Level::INFO, "Channels = {:?}", channels);
@@ -73,8 +78,9 @@ impl Chat {
         });
         while let Some(message) = stream.next().await.transpose()? {
-            if let Command::PRIVMSG(channel, message) = message.command {
-                if message.starts_with("!gem") {
+            if let Command::PRIVMSG(channel, message) = message.command
+                && message.starts_with("!gem")
+            {
                 let msg = self.llm_handle.send_request(message).await?;
                 event!(Level::INFO, "Message received.");
                 client
@@ -82,7 +88,6 @@ impl Chat {
                     .wrap_err("Couldn't send response to channel.")?;
             }
         }
-        }
         Ok(())
     }
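Worth flagging in the hunk above: the new condition is a let chain (`if let ... &&`), which requires the Rust 2024 edition (let chains stabilized in Rust 1.88). On older toolchains, the removed nested form is the equivalent:

```rust
// Pre-let-chains equivalent of the new condition:
if let Command::PRIVMSG(channel, message) = message.command {
    if message.starts_with("!gem") {
        // ... handle the "!gem" request ...
    }
}
```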

src/commands.rs (new file)

@@ -0,0 +1,21 @@
use color_eyre::Result;
use std::path::{Path, PathBuf};

/// Root directory that external bot commands are resolved against.
#[derive(Clone, Debug)]
pub struct Root {
    path: PathBuf,
}

impl Root {
    pub fn new(path: impl AsRef<Path>) -> Self {
        Root {
            path: path.as_ref().to_owned(),
        }
    }

    pub fn run_command(_cmd_string: impl AsRef<str>) -> Result<()> {
        todo!();
    }
}
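`run_command` is still a `todo!()`. Purely as a sketch (not part of this commit), and assuming commands end up as standalone executables resolved under the `Root` path with a `&self` receiver, it might grow into something like:

```rust
impl Root {
    // Hypothetical sketch: resolve the command name against the root path,
    // run the executable, and hand back its stdout as the reply text.
    pub fn run_command(&self, cmd_string: impl AsRef<str>) -> Result<String> {
        let exe = self.path.join(cmd_string.as_ref());
        let output = std::process::Command::new(&exe).output()?;
        Ok(String::from_utf8_lossy(&output.stdout).into_owned())
    }
}
```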

src/main.rs

@@ -39,16 +39,26 @@ async fn main() -> Result<()> {
     // chroot if applicable.
     if let Ok(chroot_path) = config.get_string("chroot-dir") {
+        info!("Attempting to chroot to {}", chroot_path);
         fs::chroot(&chroot_path)
-            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path.to_string()))?;
+            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path))?;
         std::env::set_current_dir("/").wrap_err("Couldn't change directory after chroot.")?;
     }
 
-    let handle = qna::new(
+    // Setup root path for commands.
+    let cmd_root = if let Ok(command_path) = config.get_string("command-path") {
+        Some(commands::Root::new(command_path))
+    } else {
+        None
+    };
+
+    let handle = qna::LLMHandle::new(
         config.get_string("api-key").wrap_err("API missing.")?,
         config
             .get_string("base-url")
             .wrap_err("base-url missing.")?,
+        cmd_root,
         config
             .get_string("model")
             .wrap_err("model string missing.")?,

src/qna.rs

@@ -1,3 +1,4 @@
+use crate::commands;
 use color_eyre::Result;
 use futures::StreamExt;
 use genai::{
@@ -22,12 +23,15 @@ use tracing::info;
 pub struct LLMHandle {
     chat_request: ChatRequest,
     client: Client,
+    cmd_root: Option<commands::Root>,
     model: String,
 }
 
+impl LLMHandle {
     pub fn new(
         api_key: String,
         _base_url: impl AsRef<str>,
+        cmd_root: Option<commands::Root>,
         model: impl Into<String>,
         system_role: String,
     ) -> Result<LLMHandle> {
@@ -47,11 +51,11 @@ pub fn new(
         Ok(LLMHandle {
             client,
             chat_request,
+            cmd_root,
             model: model.into(),
         })
     }
 
-impl LLMHandle {
     pub async fn send_request(&mut self, message: impl Into<String>) -> Result<String> {
         let mut req = self.chat_request.clone();
         let client = self.client.clone();
@@ -66,6 +70,11 @@ impl LLMHandle {
         while let Some(Ok(stream_event)) = stream.next().await {
             if let ChatStreamEvent::Chunk(StreamChunk { content }) = stream_event {
                 text.push_str(&content);
+            } else if let ChatStreamEvent::End(end) = stream_event {
+                let texts = end.captured_texts().unwrap();
+                for text in texts.into_iter() {
+                    info!("An answer: {}", text);
+                }
             }
         }
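One nit on the new `End` branch: `captured_texts()` is unwrapped, which will panic whenever the stream end carries no captured texts (for instance if content capture wasn't enabled on the chat options). A defensive variant, assuming the method returns an `Option`:

```rust
} else if let ChatStreamEvent::End(end) = stream_event {
    // Hypothetical guard instead of .unwrap(): skip quietly when
    // nothing was captured rather than panicking.
    if let Some(texts) = end.captured_texts() {
        for text in texts {
            info!("An answer: {}", text);
        }
    }
}
```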