From 5f30fdbf77304dfb66879ea8b6a92dd8fec8319a Mon Sep 17 00:00:00 2001
From: Micheal Smith
Date: Mon, 11 Aug 2025 23:37:18 -0500
Subject: [PATCH] Added README and LICENSE.

---
 Cargo.lock      | 43 +++++++++++++++++-----------------
 Cargo.toml      |  2 +-
 LICENSE         | 11 +++++++++
 README.md       | 22 ++++++++++++++++++
 robotnik.1      | 42 ++++++++++++++++++++++++++++++++++
 src/chat.rs     | 29 +++++++++++++----------
 src/commands.rs | 21 +++++++++++++++++
 src/main.rs     | 14 ++++++++++--
 src/qna.rs      | 61 ++++++++++++++++++++++++++++---------------------
 9 files changed, 182 insertions(+), 63 deletions(-)
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 robotnik.1
 create mode 100644 src/commands.rs

diff --git a/Cargo.lock b/Cargo.lock
index 49acae8..2472657 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -796,9 +796,9 @@ dependencies = [
 
 [[package]]
 name = "genai"
-version = "0.4.0-alpha.9"
+version = "0.4.0-alpha.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21d0635fdb38ec31765faf40342114085af28c70afc3cc86cb20d5be7b3c5eac"
+checksum = "2b3abc148e5048f76d4472b277015efac351b08a5bdfcbcd72a37ced67e4c538"
 dependencies = [
  "bytes",
  "derive_more 2.0.1",
@@ -1408,26 +1408,6 @@ dependencies = [
  "winapi",
 ]
 
-[[package]]
-name = "nugem"
-version = "0.1.0"
-dependencies = [
- "better-panic",
- "clap",
- "color-eyre",
- "config",
- "directories",
- "dotenvy_macro",
- "futures",
- "genai",
- "human-panic",
- "irc",
- "tokio",
- "tracing",
- "tracing-subscriber",
- "url",
-]
-
 [[package]]
 name = "num-conv"
 version = "0.1.0"
@@ -1930,6 +1910,25 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "robotnik"
+version = "0.1.0"
+dependencies = [
+ "better-panic",
+ "clap",
+ "color-eyre",
+ "config",
+ "directories",
+ "dotenvy_macro",
+ "futures",
+ "genai",
+ "human-panic",
+ "irc",
+ "tokio",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "ron"
 version = "0.8.1"
diff --git a/Cargo.toml b/Cargo.toml
index 01ddb3b..a1fcc6a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@ directories = "6.0"
 dotenvy_macro = "0.15"
 futures = "0.3"
 human-panic = "2.0"
-genai = "0.4.0-alpha.9"
+genai = "0.4.0-alpha.11"
 irc = "1.1"
 tokio = { version = "1", features = [ "full" ] }
 tracing = "0.1"
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..211483c
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,11 @@
+Copyright 2025 Micheal Smith
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Redistributions in any form must retain this license verbatim. No additional licensing terms, including but not limited to the GNU General Public License, may be imposed on the original or modified work.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..80e6c35
--- /dev/null
+++ b/README.md
@@ -0,0 +1,22 @@
+# Robotnik - A Basic LLM-Capable IRC Bot
+
+This is an IRC bot that relays chat messages to an LLM. The name is based on a
+fictional video game villain. Currently it supports any LLM that exposes an
+OpenAI-style interface. The provider and model can be selected via command-line
+options, environment variables, or a configuration file. The [configuration
+file](config.toml) *should* currently contain all available options.
+
+## Some supported but ~~possibly~~ *mostly* untested LLMs:
+
+| Name       | Model             | Base URL                                   | Tested |
+|------------|-------------------|--------------------------------------------|--------|
+| OpenAI     | gpt-5             | https://api.openai.com/v1                  | no     |
+| Deepseek   | deepseek-chat     | https://api.deepseek.com/v1                | yes    |
+| Anthropic  | claude-sonnet-4-0 | https://api.anthropic.com/v1               | no     |
+| Gemini     | gemini-2.5-turbo  | https://generativelanguage.googleapis.com  | no     |
+| OpenRouter | some-model        | https://api.openrouter.ai/v1               | no     |
+
+## Further reading...
+
+A man page (robotnik.1) is included and might be useful. Otherwise the
+-h/--help switch should hopefully suffice.
diff --git a/robotnik.1 b/robotnik.1
new file mode 100644
index 0000000..1b5009b
--- /dev/null
+++ b/robotnik.1
@@ -0,0 +1,42 @@
+.Dd $Mdocdate$
+.Dt ROBOTNIK 1
+.Os
+.Sh NAME
+.Nm robotnik
+.Nd simple LLM-capable IRC bot
+.\" .Sh LIBRARY
+.\" For sections 2, 3, and 9 only.
+.\" Not used in OpenBSD.
+.Sh SYNOPSIS
+.Nm
+.Op Fl options
+.Ar
+.Sh DESCRIPTION
+The
+.Nm
+utility connects to an IRC server and relays channel messages to an LLM.
+.\" .Sh CONTEXT
+.\" For section 9 functions only.
+.\" .Sh IMPLEMENTATION NOTES
+.\" Not used in OpenBSD.
+.\" .Sh RETURN VALUES
+.\" For sections 2, 3, and 9 function return values only.
+.\" .Sh ENVIRONMENT
+.\" For sections 1, 6, 7, and 8 only.
+.\" .Sh FILES
+.\" .Sh EXIT STATUS
+.\" For sections 1, 6, and 8 only.
+.\" .Sh EXAMPLES
+.\" .Sh DIAGNOSTICS
+.\" For sections 1, 4, 6, 7, 8, and 9 printf/stderr messages only.
+.\" .Sh ERRORS
+.\" For sections 2, 3, 4, and 9 errno settings only.
+.\" .Sh SEE ALSO
+.\" .Xr foobar 1
+.\" .Sh STANDARDS
+.\" .Sh HISTORY
+.\" .Sh AUTHORS
+.\" .Sh CAVEATS
+.\" .Sh BUGS
+.\" .Sh SECURITY CONSIDERATIONS
+.\" Not used in OpenBSD.
diff --git a/src/chat.rs b/src/chat.rs
index 8450c74..fb7f5f7 100644
--- a/src/chat.rs
+++ b/src/chat.rs
@@ -6,7 +6,10 @@ use color_eyre::{
     },
 };
 // Lots of namespace confusion potential
-use crate::qna::LLMHandle;
+use crate::{
+    commands,
+    qna::LLMHandle,
+};
 use config::Config as MainConfig;
 use futures::StreamExt;
 use irc::client::prelude::{
@@ -28,12 +31,14 @@ pub struct Chat {
 
 // Need: owners, channels, username, nick, server, password
 #[instrument]
-pub async fn new(settings: &MainConfig, handle: &LLMHandle) -> Result<Chat> {
+pub async fn new(
+    settings: &MainConfig,
+    handle: &LLMHandle,
+) -> Result<Chat> {
     // Going to just assign and let the irc library handle errors for now, and
     // add my own checking if necessary.
     let port: u16 = settings.get("port")?;
-    let channels: Vec<String> = settings.get("channels")
-        .wrap_err("No channels provided.")?;
+    let channels: Vec<String> = settings.get("channels").wrap_err("No channels provided.")?;
 
     event!(Level::INFO, "Channels = {:?}", channels);
 
@@ -73,14 +78,14 @@ impl Chat {
         });
 
         while let Some(message) = stream.next().await.transpose()? {
-            if let Command::PRIVMSG(channel, message) = message.command {
-                if message.starts_with("!gem") {
-                    let msg = self.llm_handle.send_request(message).await?;
-                    event!(Level::INFO, "Message received.");
-                    client
-                        .send_privmsg(channel, msg)
-                        .wrap_err("Couldn't send response to channel.")?;
-                }
+            if let Command::PRIVMSG(channel, message) = message.command
+                && message.starts_with("!gem")
+            {
+                let msg = self.llm_handle.send_request(message).await?;
+                event!(Level::INFO, "Message received.");
+                client
+                    .send_privmsg(channel, msg)
+                    .wrap_err("Couldn't send response to channel.")?;
             }
         }
 
diff --git a/src/commands.rs b/src/commands.rs
new file mode 100644
index 0000000..32282a9
--- /dev/null
+++ b/src/commands.rs
@@ -0,0 +1,21 @@
+use color_eyre::Result;
+use std::path::{Path, PathBuf};
+
+#[derive(Clone, Debug)]
+pub struct Root {
+    path: PathBuf,
+}
+
+impl Root {
+    pub fn new(path: impl AsRef<Path>) -> Self {
+        Root {
+            path: path.as_ref().to_owned(),
+        }
+    }
+
+    // Stub: command execution is not implemented yet. The intent is to run
+    // `cmd_string` relative to the configured root in `self.path`.
+    pub fn run_command(&self, cmd_string: impl AsRef<str>) -> Result<()> {
+        todo!();
+    }
+}
diff --git a/src/main.rs b/src/main.rs
index b639706..ffda2ba 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -39,16 +39,26 @@ async fn main() -> Result<()> {
 
     // chroot if applicable.
     if let Ok(chroot_path) = config.get_string("chroot-dir") {
+        info!("Attempting to chroot to {}", chroot_path);
         fs::chroot(&chroot_path)
-            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path.to_string()))?;
+            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path))?;
         std::env::set_current_dir("/").wrap_err("Couldn't change directory after chroot.")?;
+    }
 
-    let handle = qna::new(
+    // Set up the root path for commands.
+    let cmd_root = if let Ok(command_path) = config.get_string("command-path") {
+        Some(commands::Root::new(command_path))
+    } else {
+        None
+    };
+
+    let handle = qna::LLMHandle::new(
         config.get_string("api-key").wrap_err("API missing.")?,
         config
             .get_string("base-url")
             .wrap_err("base-url missing.")?,
+        cmd_root,
         config
             .get_string("model")
             .wrap_err("model string missing.")?,
diff --git a/src/qna.rs b/src/qna.rs
index 559e4b8..454512f 100644
--- a/src/qna.rs
+++ b/src/qna.rs
@@ -1,3 +1,4 @@
+use crate::commands;
 use color_eyre::Result;
 use futures::StreamExt;
 use genai::{
@@ -22,36 +23,39 @@ use tracing::info;
 pub struct LLMHandle {
     chat_request: ChatRequest,
     client: Client,
+    cmd_root: Option<commands::Root>,
     model: String,
 }
 
-pub fn new(
-    api_key: String,
-    _base_url: impl AsRef<str>,
-    model: impl Into<String>,
-    system_role: String,
-) -> Result<LLMHandle> {
-    let auth_resolver = AuthResolver::from_resolver_fn(
-        |_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
-            // let ModelIden { adapter_kind, model_name } = model_iden;
-
-            Ok(Some(AuthData::from_single(api_key)))
-        },
-    );
-
-    let client = Client::builder().with_auth_resolver(auth_resolver).build();
-    let chat_request = ChatRequest::default().with_system(system_role);
-
-    info!("New LLMHandle created.");
-
-    Ok(LLMHandle {
-        client,
-        chat_request,
-        model: model.into(),
-    })
-}
-
 impl LLMHandle {
+    pub fn new(
+        api_key: String,
+        _base_url: impl AsRef<str>,
+        cmd_root: Option<commands::Root>,
+        model: impl Into<String>,
+        system_role: String,
+    ) -> Result<Self> {
+        let auth_resolver = AuthResolver::from_resolver_fn(
+            |_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
+                // let ModelIden { adapter_kind, model_name } = model_iden;
+
+                Ok(Some(AuthData::from_single(api_key)))
+            },
+        );
+
+        let client = Client::builder().with_auth_resolver(auth_resolver).build();
+        let chat_request = ChatRequest::default().with_system(system_role);
+
+        info!("New LLMHandle created.");
+
+        Ok(LLMHandle {
+            client,
+            chat_request,
+            cmd_root,
+            model: model.into(),
+        })
+    }
+
     pub async fn send_request(&mut self, message: impl Into<String>) -> Result<String> {
         let mut req = self.chat_request.clone();
         let client = self.client.clone();
@@ -66,6 +70,11 @@ impl LLMHandle {
         while let Some(Ok(stream_event)) = stream.next().await {
             if let ChatStreamEvent::Chunk(StreamChunk { content }) = stream_event {
                 text.push_str(&content);
+            } else if let ChatStreamEvent::End(end) = stream_event {
+                let texts = end.captured_texts().unwrap();
+                for text in texts.into_iter() {
+                    info!("An answer: {}", text);
+                }
             }
         }
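
A note on src/commands.rs: run_command is still todo!() in this patch, so the
following is only a minimal sketch of one shape it could take, not part of the
change itself. It assumes a flat directory of executables under the configured
root and uses std::process; the separator check and the captured-stdout return
type are illustrative placeholders.

    use color_eyre::{eyre::eyre, Result};
    use std::path::PathBuf;

    pub struct Root {
        path: PathBuf,
    }

    impl Root {
        pub fn run_command(&self, cmd_string: impl AsRef<str>) -> Result<String> {
            let mut words = cmd_string.as_ref().split_whitespace();
            // First word is the command name; the rest are passed as arguments.
            let name = words.next().ok_or_else(|| eyre!("Empty command."))?;
            // Refuse separators so lookups can't escape the command root.
            if name.contains('/') || name.contains('\\') {
                return Err(eyre!("Command name may not contain a path separator."));
            }
            // Resolve the executable relative to the configured root and run it.
            let exe = self.path.join(name);
            let output = std::process::Command::new(exe).args(words).output()?;
            Ok(String::from_utf8_lossy(&output.stdout).into_owned())
        }
    }

Since the bot runs under tokio, tokio::process::Command (same output() shape,
awaited instead) would be the non-blocking alternative here.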