diff --git a/README.md b/README.md index 1d7e3a3..7ae2911 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,89 @@ -# Streaming ChatGPT for Shell and Neovim -AI assistants are transformational for programmers. However, ChatGPT 4 is also relatively slow. Streaming its responses greatly improves the user experience. These utilities attempts to bring these tools closer to the command-line and editor while preserving streaming. There are three parts here: +# Streaming LLM for Shell and Neovim + +AI assistants are transformational for programmers. However, models like ChatGPT 4 are also relatively slow. Streaming their responses greatly improves the user experience. These utilities attempt to bring these tools closer to the command-line and editor while preserving streaming. There are three parts here: + 1. A Rust binary that streams completion responses to stdin 2. A shell script that builds a little REPL over that binary 3. A Neovim Lua plug-in that brings this functionality into the editor - ## Rust program + The Rust program can be built with `cargo build`. It expects an `OPENAI_API_KEY` and/or an `ANTHROPIC_API_KEY` environment variable. If both keys are provided, Anthropic is used. The Rust program can take two kinds of input, read from stdin: -1. Raw input -In this case, a System prompt is provided in the compiled code -2. Transcript -The Rust program also accepts a homegrown "transcript" format in which transcript sections are delineated by lines which look like this + +1. **Raw input:** In this case, a System prompt is provided in the compiled code +2. **Transcript:** The Rust program also accepts a homegrown "transcript" format in which transcript sections are delineated by lines which look like this ``` ===USER=== ``` -If a transcript does not start with a System section, then the default System prompt is used. 
-## Lua script -The included lua script can be copied to `.config/nvim/lua` and installed with something like +If a transcript does not start with a System section, then the default System prompt is used. The default prompt can be customized with contents from a file passed as a first argument to the executable. + +To override the default Anthropic model (`claude-opus-4-6`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. + +To override the default OpenAI model (`gpt-5`), set the `OPENAI_MODEL` environment variable to the desired value. + +## Installation + +### Using `git clone` + +``` +mkdir -p ~/.config/nvim/pack/bundle/start +git clone https://github.com/wolffiex/shellbot.git ~/.config/nvim/pack/bundle/start/shellbot +cd ~/.config/nvim/pack/bundle/start/shellbot +cargo build +``` + +### Using `packer.nvim` + +```lua +use { + 'wolffiex/shellbot', + run = 'cargo build' +} ``` -vim.cmd("command! ChatGPT lua require'chatgpt'.chatgpt()") + +### Using `vim-plug` + +```vim +Plug 'wolffiex/shellbot', { 'do': 'cargo build' } ``` -This command locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. +### Using `dein.vim` + +```vim +call dein#add('wolffiex/shellbot', { 'build': 'cargo build' }) +``` + +### Using `lazy.nvim` + +```lua +{ + 'wolffiex/shellbot', + build = 'cargo build' +} +``` + +### Using `Vundle` + +```vim +Plugin 'wolffiex/shellbot' +``` + +After installation, run `:!cargo build` in the plugin directory. + +## Neovim commands + +### `:Shellbot` + +The plugin defines a `:Shellbot` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the Rust binary built in the step above. This plugin is optimized to allow for streaming. It attempts to keep new input in view by repositioning the cursor at the end of the buffer as new text is appended. 
The plugin takes care to work in the case that the user switches away from the window where the response is coming in. To turn off the cursor movement while a response is streaming, hit "Enter" or "Space." This will free the cursor for the rest of the response. +### `:checkhealth shellbot` + +Verifies that the file defined by `SHELLBOT` exists and is executable. + ## Shell script + `shellbot.sh` can be used from the command line in cases where the editor isn't active. Because it uses `fold` for word wrap, it works best in a narrow window. The first prompt comes from $EDITOR. Subsequent prompts are taken with `read`. Hitting enter on a blank line does submit. diff --git a/ftplugin/shellbot.lua b/ftplugin/shellbot.lua new file mode 100644 index 0000000..875c549 --- /dev/null +++ b/ftplugin/shellbot.lua @@ -0,0 +1,18 @@ +vim.bo.buflisted = true +vim.bo.buftype = 'nofile' +vim.bo.modified = false +vim.bo.textwidth = 0 +vim.wo.breakindent = true +vim.wo.linebreak = true +vim.wo.list = false +vim.wo.number = false +vim.wo.relativenumber = false +vim.wo.showbreak = 'NONE' +vim.wo.wrap = true + +local has_shellbot = pcall(require, 'chatbot') +if has_shellbot then + vim.keymap.set({ 'i', 'n' }, '', ChatBotSubmit, { buffer = true }) + vim.keymap.set({ 'i', 'n' }, '', ChatBotSubmit, { buffer = true }) + vim.keymap.set({ 'i', 'n' }, '', ChatBotNewBuf, { buffer = true }) +end diff --git a/chatbot.lua b/lua/chatbot.lua similarity index 88% rename from chatbot.lua rename to lua/chatbot.lua index c22f087..1021c8e 100644 --- a/chatbot.lua +++ b/lua/chatbot.lua @@ -4,11 +4,14 @@ local is_receiving = false local bot_cmd = os.getenv("SHELLBOT") local separator = "===" +local nbsp = ' ' local roles = { - USER = "◭🧑 " .. os.getenv('USER'), - ASSISTANT = "◮🤖 vimbot", + USER = nbsp .. "🤓 «" .. os.getenv('USER') .. "»" .. nbsp, + ASSISTANT = nbsp .. "🤖 «vimbot»" .. 
nbsp, } +local buffer_env = {} + local buffer_sync_cursor = {} function ChatBotCancelCursorSync() local bufnr = vim.api.nvim_get_current_buf() @@ -43,6 +46,10 @@ function ChatBotSubmit() vim.cmd("normal! Go") local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() + local env = buffer_env[bufnr] and vim.tbl_extend('keep', buffer_env[bufnr], { + SHELLBOT_LOG_FILE = vim.env['SHELLBOT_LOG_FILE'], + }) + local clear_env = not not env buffer_sync_cursor[bufnr] = true local function receive_stream(_, data, _) if #data > 1 or data[1] ~= '' then @@ -95,9 +102,9 @@ function ChatBotSubmit() local function get_transcript() local lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false) for i, line in ipairs(lines) do - if line:match("^◭") then -- '^' means start of line + if line:match('^' .. nbsp .. '🤓') then -- '^' means start of line lines[i] = separator .. "USER" .. separator - elseif line:match("^◮") then + elseif line:match('^' .. nbsp ..'🤖') then lines[i] = separator .. "ASSISTANT" .. 
separator end end @@ -112,6 +119,8 @@ function ChatBotSubmit() local output = {} local job_id = vim.fn.jobstart(bot_cmd, { + clear_env = clear_env, + env = env, on_stdout = function(_, data, _) if data[1] ~= "" then table.insert(output, data[1]) @@ -154,6 +163,8 @@ function ChatBotSubmit() end local job_id = vim.fn.jobstart(bot_cmd, { + clear_env = clear_env, + env = env, on_stdout = receive_stream, on_exit = stream_done, on_stderr = function(_, data, _) @@ -202,40 +213,25 @@ function ChatBotSubmit() end function ChatBotNewBuf() + local bufnr = vim.api.nvim_get_current_buf() vim.cmd("enew") - ChatBotInit() + ChatBotInit(buffer_env[bufnr]) end -function ChatBotInit() +function ChatBotInit(env) local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() + buffer_env[bufnr] = env buffer_sync_cursor[bufnr] = true - vim.wo.breakindent = true - vim.wo.wrap = true - vim.wo.linebreak = true - vim.api.nvim_buf_set_option(bufnr, 'filetype', 'shellbot') - vim.api.nvim_buf_set_option(bufnr, 'buftype', 'nofile') - vim.api.nvim_buf_set_option(bufnr, 'buflisted', true) - vim.api.nvim_buf_set_option(bufnr, 'modified', false) + vim.api.nvim_set_option_value('filetype', 'shellbot', { buf = bufnr }) add_transcript_header(winnr, bufnr, "USER", 0) - local modes = { 'n', 'i' } - for _, mode in ipairs(modes) do - vim.api.nvim_buf_set_keymap(bufnr, mode, '', ':lua ChatBotSubmit()', - { noremap = true, silent = true }) - vim.api.nvim_buf_set_keymap(bufnr, mode, '', ':lua ChatBotNewBuf()', - { noremap = true, silent = true }) - end end -function M.chatbot() +function M.chatbot(env) vim.cmd("botright vnew") vim.cmd("set winfixwidth") vim.cmd("vertical resize 60") - ChatBotInit() -end - -function M.chatbot_init() - ChatBotInit() + ChatBotInit(env) end function ChatBotCancelResponse() diff --git a/lua/shellbot/health.lua b/lua/shellbot/health.lua new file mode 100644 index 0000000..9efdd85 --- /dev/null +++ b/lua/shellbot/health.lua @@ -0,0 +1,21 @@ +local health 
= vim.health -- after: https://github.com/neovim/neovim/pull/18720 + or require('health') -- before: v0.8.x + +return { + -- Run with `:checkhealth shellbot` + check = function() + local shellbot = vim.env['SHELLBOT'] + if shellbot == nil then + health.warn('SHELLBOT environment variable is not set') + else + local executable = vim.fn.split(shellbot, ' ')[1] + if executable == nil then + health.warn('SHELLBOT environment variable is empty') + elseif vim.fn.executable(executable) ~= 1 then + health.warn('SHELLBOT (' .. vim.inspect(shellbot) .. ') is not executable') + else + health.ok('SHELLBOT environment variable is set to an executable') + end + end + end, +} diff --git a/plugin/shellbot.lua b/plugin/shellbot.lua new file mode 100644 index 0000000..28b7a47 --- /dev/null +++ b/plugin/shellbot.lua @@ -0,0 +1,12 @@ +vim.api.nvim_create_user_command('Shellbot', function() + local shellbot = require('chatbot') + local env = vim.env['SHELLBOT'] + if env ~= nil then + local executable = vim.fn.split(env, ' ')[1] + if executable ~= nil and vim.fn.executable(executable) == 1 then + shellbot.chatbot() + return + end + end + vim.api.nvim_err_writeln('error: SHELLBOT does not appear to be executable') +end, {}) diff --git a/shellbot/health.lua b/shellbot/health.lua deleted file mode 100644 index 4496c37..0000000 --- a/shellbot/health.lua +++ /dev/null @@ -1,16 +0,0 @@ -local health = vim.health -- after: https://github.com/neovim/neovim/pull/18720 - or require('health') -- before: v0.8.x - -return { - -- Run with `:checkhealth shellbot` - check = function() - local shellbot = vim.env['SHELLBOT'] - if shellbot == nil then - health.warn('SHELLBOT environment variable is not set') - elseif vim.fn.executable(shellbot) ~= 1 then - health.warn('SHELLBOT (' .. vim.inspect(shellbot) .. 
') is not executable') - else - health.ok('SHELLBOT environment variable is set to an executable') - end - end, -} diff --git a/src/anthropic.rs b/src/anthropic.rs index b3fe064..ace8487 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -5,8 +5,8 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "claude-3-opus-20240229"; -pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { +const MODEL: &str = "claude-opus-4-6"; +pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages"; let mut headers = HeaderMap::new(); @@ -28,7 +28,11 @@ pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { ); let request = RequestJSON { - model: MODEL.to_string(), + model: if model.is_empty() { + MODEL.to_string() + } else { + model.to_string() + }, system: request.system_prompt, messages: request.transcript, stream: true, diff --git a/src/api.rs b/src/api.rs index c0ba805..b520a9b 100644 --- a/src/api.rs +++ b/src/api.rs @@ -11,14 +11,18 @@ use crate::sse::SSEConverter; use crate::sse::SSEvent; pub enum ApiProvider { - OpenAI(String), - Anthropic(String), + OpenAI(String, String), + Anthropic(String, String), } pub fn stream_response<'a>(provider: ApiProvider, request: ChatRequest) -> Receiver { let request = match provider { - ApiProvider::OpenAI(ref api_key) => openai::get_request(&api_key, request), - ApiProvider::Anthropic(ref api_key) => anthropic::get_request(&api_key, request), + ApiProvider::OpenAI(ref api_key, ref model) => { + openai::get_request(&api_key, &model, request) + } + ApiProvider::Anthropic(ref api_key, ref model) => { + anthropic::get_request(&api_key, &model, request) + } }; let (sender, receiver) = mpsc::channel(100); tokio::spawn(async move { send_response(&provider, request, sender).await }); @@ -98,8 +102,8 @@ fn 
convert_chunk(chunk: Bytes) -> String { fn process_sse(provider: &ApiProvider, event: SSEvent) -> Option { match provider { - ApiProvider::Anthropic(_) => anthropic::convert_sse(event), - ApiProvider::OpenAI(_) => openai::convert_sse(event), + ApiProvider::Anthropic(_, _) => anthropic::convert_sse(event), + ApiProvider::OpenAI(_, _) => openai::convert_sse(event), } } diff --git a/src/main.rs b/src/main.rs index 0a4f455..d621a31 100644 --- a/src/main.rs +++ b/src/main.rs @@ -38,8 +38,16 @@ async fn main() { } let request = structure_input(); let provider = std::env::var("ANTHROPIC_API_KEY") - .map(ApiProvider::Anthropic) - .or_else(|_| std::env::var("OPENAI_API_KEY").map(ApiProvider::OpenAI)) + .map(|key| { + let model = std::env::var("ANTHROPIC_MODEL").unwrap_or_default(); + ApiProvider::Anthropic(key, model) + }) + .or_else(|_| { + std::env::var("OPENAI_API_KEY").map(|key| { + let model = std::env::var("OPENAI_MODEL").unwrap_or_default(); + ApiProvider::OpenAI(key, model) + }) + }) .unwrap_or_else(|_| panic!("No API key provided")); let mut receiver = stream_response(provider, request); @@ -71,13 +79,11 @@ fn structure_input() -> ChatRequest { let args: Vec = std::env::args().collect(); let system_prompt = if args.len() > 1 { let file_path = &args[1]; - println!("FILE {:?}", file_path); let mut file = File::open(file_path).unwrap_or_else(|_| { panic!("Failed to open file: {}", file_path); }); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); - println!("contents {:?}", contents); contents } else { get_default_prompt() diff --git a/src/openai.rs b/src/openai.rs index 2c1cc44..3ba0081 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -4,12 +4,15 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "gpt-4"; -pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { - let mut messages = vec![ChatMessage { - role: 
ChatRole::System, - content: request.system_prompt, - }]; +const MODEL: &str = "gpt-5"; +pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { + let mut messages = vec![]; + if model != "o1-preview" && model != "o1-mini" { + messages.push(ChatMessage { + role: ChatRole::System, + content: request.system_prompt, + }); + } messages.extend_from_slice(&request.transcript); let client = Client::new(); let url = "https://api.openai.com/v1/chat/completions"; @@ -23,7 +26,11 @@ pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { HeaderValue::from_str(&format!("Bearer {}", api_key)).unwrap(), ); let request = RequestJSON { - model: MODEL.to_string(), + model: if model.is_empty() { + MODEL.to_string() + } else { + model.to_string() + }, stream: true, messages, }; diff --git a/syntax/shellbot.lua b/syntax/shellbot.lua new file mode 100644 index 0000000..ba77b46 --- /dev/null +++ b/syntax/shellbot.lua @@ -0,0 +1,25 @@ +if vim.fn.exists('main_syntax') == 0 then + if vim.fn.exists('b:current_syntax') == 1 then + return + end + vim.g.main_syntax = 'shellbot' +elseif vim.fn.exists('b:current_syntax') == 1 and vim.b.current_syntax == 'shellbot' then + return +end + +vim.cmd('runtime! syntax/markdown.vim') + +local cpo = vim.o.cpo + +vim.cmd([[ + set cpo&vim + syntax match ChatBotHeader /^ 🤓 .*/ containedin=ALL + syntax match ChatBotHeader /^ 🤖 .*/ containedin=ALL + highlight def link ChatBotHeader TermCursor +]]) + +vim.b.current_syntax = 'shellbot' +if vim.g.main_syntax == 'shellbot' then + vim.api.nvim_del_var('main_syntax') +end +vim.o.cpo = cpo