From 34918c0b40859df9c1f22bf73fbf4a242d6d6a96 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 2 Oct 2023 16:10:56 +0200 Subject: [PATCH 01/25] docs: fix formatting in README Despite the newline in the source, GitHub renders each of these numbered list items all on one line. Use markup to make the separation obvious. --- README.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 1d7e3a3..ee6b385 100644 --- a/README.md +++ b/README.md @@ -7,10 +7,8 @@ AI assistants are transformational for programmers. However, ChatGPT 4 is also r ## Rust program The Rust program can be built with `cargo build`. It expects an `OPENAI_API_KEY` and/or an `ANTHROPIC_API_KEY` environment variable. If both keys are provided, Anthropic is used. The Rust program can take two kinds of input, read from stdin: -1. Raw input -In this case, a System prompt is provided in the compiled code -2. Transcript -The Rust program also accepts a homegrown "transcript" format in which transcript sections are delineated by lines which look like this +1. **Raw input:** In this case, a System prompt is provided in the compiled code +2. **Transcript:** The Rust program also accepts a homegrown "transcript" format in which transcript sections are delineated by lines which look like this ``` ===USER=== From 9d42b2f631480c3955b22a4b37030a766e68ac0b Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 19 Jul 2024 10:36:56 +0200 Subject: [PATCH 02/25] chore: remove debugging `println!` These cause the prompt file contents to show up in the editor, which isn't desirable. 
--- src/main.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main.rs b/src/main.rs index 0a4f455..e49f564 100644 --- a/src/main.rs +++ b/src/main.rs @@ -71,13 +71,11 @@ fn structure_input() -> ChatRequest { let args: Vec = std::env::args().collect(); let system_prompt = if args.len() > 1 { let file_path = &args[1]; - println!("FILE {:?}", file_path); let mut file = File::open(file_path).unwrap_or_else(|_| { panic!("Failed to open file: {}", file_path); }); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); - println!("contents {:?}", contents); contents } else { get_default_prompt() From 91be26784e33c967f80369533ef714933f442ad6 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 19 Jul 2024 10:57:18 +0200 Subject: [PATCH 03/25] feat: refactor health check to allow spaces in SHELLBOT In ef73bd45e07dc9669b20d97aa4b06200e7ea11f3 we added support for a command-line argument that can be used to override the default prompt, but actually supplying an argument means that the old `vim.fn.executable` check won't work. The simplest fix is to just split on whitespace and check the first item to see if it's executable. This isn't perfect, because if somebody installed the executable at a path like `/My/Path With Spaces/bin/shellbot`, then we'd test `/My/Path` and report a false negative. But for the common case, this is still an improvement. --- shellbot/health.lua | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/shellbot/health.lua b/shellbot/health.lua index 4496c37..9efdd85 100644 --- a/shellbot/health.lua +++ b/shellbot/health.lua @@ -7,10 +7,15 @@ return { local shellbot = vim.env['SHELLBOT'] if shellbot == nil then health.warn('SHELLBOT environment variable is not set') - elseif vim.fn.executable(shellbot) ~= 1 then - health.warn('SHELLBOT (' .. vim.inspect(shellbot) .. 
') is not executable') else - health.ok('SHELLBOT environment variable is set to an executable') + local executable = vim.fn.split(shellbot, ' ')[1] + if executable == nil then + health.warn('SHELLBOT environment variable is empty') + elseif vim.fn.executable(executable) ~= 1 then + health.warn('SHELLBOT (' .. vim.inspect(shellbot) .. ') is not executable') + else + health.ok('SHELLBOT environment variable is set to an executable') + end end end, } From 7d5ad27a33b7ea096403bf5d9b31ce701be52df2 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Sat, 21 Oct 2023 01:27:28 +0200 Subject: [PATCH 04/25] style: make some cosmetic improvements MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit On my machine: - ◭ ("Up-pointing triangle with left half black") - ◮ ("Up-pointing triangle with right half black") look pretty bad in the terminal, so swap them for something else. --- chatbot.lua | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/chatbot.lua b/chatbot.lua index c22f087..04bce5b 100644 --- a/chatbot.lua +++ b/chatbot.lua @@ -4,9 +4,10 @@ local is_receiving = false local bot_cmd = os.getenv("SHELLBOT") local separator = "===" +local nbsp = ' ' local roles = { - USER = "◭🧑 " .. os.getenv('USER'), - ASSISTANT = "◮🤖 vimbot", + USER = " 🤓 «" .. os.getenv('USER') .. "»" .. nbsp, + ASSISTANT = " 🤖 «vimbot»" .. nbsp, } local buffer_sync_cursor = {} @@ -95,9 +96,9 @@ function ChatBotSubmit() local function get_transcript() local lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false) for i, line in ipairs(lines) do - if line:match("^◭") then -- '^' means start of line + if line:match("^ 🤓") then -- '^' means start of line lines[i] = separator .. "USER" .. separator - elseif line:match("^◮") then + elseif line:match("^ 🤖") then lines[i] = separator .. "ASSISTANT" .. 
separator end end From 1a5ca7d128bba27e80b9ca3fece687d411025b98 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Sat, 21 Oct 2023 12:45:27 +0200 Subject: [PATCH 05/25] refactor!: use NBSP at front of headers as well Trying to distinguish from "normal" lines a little more explicitly. --- chatbot.lua | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/chatbot.lua b/chatbot.lua index 04bce5b..5643a82 100644 --- a/chatbot.lua +++ b/chatbot.lua @@ -6,8 +6,8 @@ local separator = "===" local nbsp = ' ' local roles = { - USER = " 🤓 «" .. os.getenv('USER') .. "»" .. nbsp, - ASSISTANT = " 🤖 «vimbot»" .. nbsp, + USER = nbsp .. "🤓 «" .. os.getenv('USER') .. "»" .. nbsp, + ASSISTANT = nbsp .. "🤖 «vimbot»" .. nbsp, } local buffer_sync_cursor = {} @@ -96,9 +96,9 @@ function ChatBotSubmit() local function get_transcript() local lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false) for i, line in ipairs(lines) do - if line:match("^ 🤓") then -- '^' means start of line + if line:match('^' .. nbsp .. '🤓') then -- '^' means start of line lines[i] = separator .. "USER" .. separator - elseif line:match("^ 🤖") then + elseif line:match('^' .. nbsp ..'🤖') then lines[i] = separator .. "ASSISTANT" .. separator end end From 841a2ec88212c1f4a5646e49845d61fe5baaa735 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 19 Jul 2024 12:27:04 +0200 Subject: [PATCH 06/25] docs: fix outdated instruction in README.md Module was renamed in 919a92414c66ffdbb7856cc0342598d7a8fcc5c7, so these instructions are wrong now. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ee6b385..356690c 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ If a transcript does not start with a System section, then the default System pr ## Lua script The included lua script can be copied to `.config/nvim/lua` and installed with something like ``` -vim.cmd("command! ChatGPT lua require'chatgpt'.chatgpt()") +vim.cmd("command! 
ChatGPT lua require'chatbot'.chatbot()") ``` This command locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. From d98d049997ac3ea333e3f74951dff91c933534a6 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 19 Jul 2024 17:17:41 +0200 Subject: [PATCH 07/25] chore: try GPT-4o MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Difference as per docs: - https://platform.openai.com/docs/models > GPT-4o > > Our high-intelligence flagship model for complex, multi‑step tasks > > Text and image input, text output > 128k context length > Input: $5 | Output: $15* > (* prices per 1M tokens) > > # GPT-4o: https://platform.openai.com/docs/models/gpt-4o > > GPT-4o (“o” for “omni”) is our most advanced model. It is > multimodal (accepting text or image inputs and outputting text), > and it has the same high intelligence as GPT-4 Turbo but is much > more efficient—it generates text 2x faster and is 50% cheaper. > Additionally, GPT-4o has the best vision and performance across > non-English languages of any of our models. GPT-4o is available in the > OpenAI API to paying customers. > > # GPT-4 Turbo and GPT-4: https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4 > > GPT-4 is a large multimodal model (accepting text or image inputs > and outputting text) that can solve difficult problems with greater > accuracy than any of our previous models, thanks to its broader > general knowledge and advanced reasoning capabilities. GPT-4 is > available in the OpenAI API to paying customers. Like gpt-3.5-turbo, > GPT-4 is optimized for chat but works well for traditional completions > tasks using the Chat Completions API. 
--- src/openai.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/openai.rs b/src/openai.rs index 2c1cc44..4f8787a 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -4,7 +4,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "gpt-4"; +const MODEL: &str = "gpt-4o"; pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { let mut messages = vec![ChatMessage { role: ChatRole::System, From d95b5243f0714eb79a7da3fda157768014f2f4d0 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:06:20 +0200 Subject: [PATCH 08/25] refactor: move Neovim files into `lua/` subdirectory As noted in: - https://github.com/wolffiex/shellbot/pull/10 This allows the repo to be installed directly as a Neovim plugin without having to copy files around, and opens the door to us shipping a syntax file, which I'll do in the next commit. --- chatbot.lua => lua/chatbot.lua | 0 {shellbot => lua/shellbot}/health.lua | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename chatbot.lua => lua/chatbot.lua (100%) rename {shellbot => lua/shellbot}/health.lua (100%) diff --git a/chatbot.lua b/lua/chatbot.lua similarity index 100% rename from chatbot.lua rename to lua/chatbot.lua diff --git a/shellbot/health.lua b/lua/shellbot/health.lua similarity index 100% rename from shellbot/health.lua rename to lua/shellbot/health.lua From 0682fbd3c9ffba44b363446039b0da8cd3cf83ca Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:07:49 +0200 Subject: [PATCH 09/25] feat: add syntax file --- syntax/shellbot.lua | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 syntax/shellbot.lua diff --git a/syntax/shellbot.lua b/syntax/shellbot.lua new file mode 100644 index 0000000..ba77b46 --- /dev/null +++ b/syntax/shellbot.lua @@ -0,0 +1,25 @@ +if vim.fn.exists('main_syntax') == 0 then + if vim.fn.exists('b:current_syntax') 
== 1 then + return + end + vim.g.main_syntax = 'shellbot' +elseif vim.fn.exists('b:current_syntax') == 1 and vim.b.current_syntax == 'shellbot' then + return +end + +vim.cmd('runtime! syntax/markdown.vim') + +local cpo = vim.o.cpo + +vim.cmd([[ + set cpo&vim + syntax match ChatBotHeader /^ 🤓 .*/ containedin=ALL + syntax match ChatBotHeader /^ 🤖 .*/ containedin=ALL + highlight def link ChatBotHeader TermCursor +]]) + +vim.b.current_syntax = 'shellbot' +if vim.g.main_syntax == 'shellbot' then + vim.api.nvim_del_var('main_syntax') +end +vim.o.cpo = cpo From 3201989b90f5ca16e3a2c82437a03e66c066ce56 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:16:56 +0200 Subject: [PATCH 10/25] feat: add `:ChatGPT` command So the user doesn't need to set it up manually. --- plugin/shellbot.lua | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 plugin/shellbot.lua diff --git a/plugin/shellbot.lua b/plugin/shellbot.lua new file mode 100644 index 0000000..74b449c --- /dev/null +++ b/plugin/shellbot.lua @@ -0,0 +1,12 @@ +vim.api.nvim_create_user_command('ChatGPT', function() + local shellbot = require('chatbot') + local env = vim.env['SHELLBOT'] + if env ~= nil then + local executable = vim.fn.split(env, ' ')[1] + if executable ~= nil and vim.fn.executable(executable) == 1 then + shellbot.chatbot() + return + end + end + vim.api.nvim_err_writeln('error: SHELLBOT does not appear to be executable') +end, {}) From a089bc676b5095b8f9f4bc218440be9982aecfcc Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:28:33 +0200 Subject: [PATCH 11/25] docs: add instructions for installation --- README.md | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 356690c..aaca199 100644 --- a/README.md +++ b/README.md @@ -1,29 +1,85 @@ # Streaming ChatGPT for Shell and Neovim + AI assistants are transformational for programmers. 
However, ChatGPT 4 is also relatively slow. Streaming its responses greatly improves the user experience. These utilities attempts to bring these tools closer to the command-line and editor while preserving streaming. There are three parts here: + 1. A Rust binary that streams completion responses to stdin 2. A shell script that builds a little REPL over that binary 3. A Neovim Lua plug-in that brings this functionality into the editor - ## Rust program + The Rust program can be built with `cargo build`. It expects an `OPENAI_API_KEY` and/or an `ANTHROPIC_API_KEY` environment variable. If both keys are provided, Anthropic is used. The Rust program can take two kinds of input, read from stdin: + 1. **Raw input:** In this case, a System prompt is provided in the compiled code 2. **Transcript:** The Rust program also accepts a homegrown "transcript" format in which transcript sections are delineated by lines which look like this ``` ===USER=== ``` + If a transcript does not start with a System section, then the default System prompt is used. -## Lua script -The included lua script can be copied to `.config/nvim/lua` and installed with something like +## Installation + +### Using `git clone` + ``` -vim.cmd("command! ChatGPT lua require'chatbot'.chatbot()") +mkdir -p ~/.config/nvim/pack/bundle/start +git clone https://github.com/wolffiex/shellbot.git ~/.config/nvim/pack/bundle/start/shellbot +cd ~/.config/nvim/pack/bundle/start/shellbot +cargo build ``` -This command locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. 
+### Using `packer.nvim` + +```lua +use { + 'wolffiex/shellbot', + run = 'cargo build' +} +``` + +### Using `vim-plug` + +```vim +Plug 'wolffiex/shellbot', { 'do': 'cargo build' } +``` + +### Using `dein.vim` + +```vim +call dein#add('wolffiex/shellbot', { 'build': 'cargo build' }) +``` + +### Using `lazy.nvim` + +```lua +{ + 'wolffiex/shellbot', + build = 'cargo build' +} +``` + +### Using `Vundle` + +```vim +Plugin 'wolffiex/shellbot' +``` + +After installation, run `:!cargo build` in the plugin directory. + +## Commands + +### `:ChatGPT` + +The plugin defines a `:ChatGPT` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. This plugin is optimized to allow for streaming. It attempts to keep new input in view by repositioning the cursor at the end of the buffer as new text is appended. The plugin takes care to work in the case that the user switches away from the window where the response is coming in. To turn off the cursor movement while a response is streaming, hit "Enter" or "Space." This will free the cursor for the rest of the response. +### `:checkhealth shellbot` + +Verifies that the file defined by `SHELLBOT` exists and is executable. + ## Shell script + `shellbot.sh` can be used from the command line in cases where the editor isn't active. Because it uses `fold` for word wrap, it works best in a narrow window. The first prompt comes from $EDITOR. Subsequent prompts are taken with `read`. Hitting enter on a blank line does submit. From 0809fdb5889a4d6e26d83f3b54e594c1ca22df57 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:35:53 +0200 Subject: [PATCH 12/25] feat: add an ftplugin file This gives us a place to put settings. I'll move the existing settings from the plugin into here in the next commit. 
--- ftplugin/shellbot.lua | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 ftplugin/shellbot.lua diff --git a/ftplugin/shellbot.lua b/ftplugin/shellbot.lua new file mode 100644 index 0000000..54c4193 --- /dev/null +++ b/ftplugin/shellbot.lua @@ -0,0 +1,10 @@ +vim.bo.textwidth = 0 +vim.wo.list = false +vim.wo.number = false +vim.wo.relativenumber = false +vim.wo.showbreak = 'NONE' + +local has_shellbot = pcall(require, 'chatbot') +if has_shellbot then + vim.keymap.set({ 'i', 'n' }, '', ChatBotSubmit, { buffer = true }) +end From a24dafd3bb59caef4821930b66f9e828a43b46eb Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:38:21 +0200 Subject: [PATCH 13/25] refactor: move settings from module into ftplugin --- ftplugin/shellbot.lua | 6 ++++++ lua/chatbot.lua | 6 ------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ftplugin/shellbot.lua b/ftplugin/shellbot.lua index 54c4193..cb0a397 100644 --- a/ftplugin/shellbot.lua +++ b/ftplugin/shellbot.lua @@ -1,8 +1,14 @@ +vim.bo.buflisted = true +vim.bo.buftype = 'nofile' +vim.bo.modified = false vim.bo.textwidth = 0 +vim.wo.breakindent = true +vim.wo.linebreak = true vim.wo.list = false vim.wo.number = false vim.wo.relativenumber = false vim.wo.showbreak = 'NONE' +vim.wo.wrap = true local has_shellbot = pcall(require, 'chatbot') if has_shellbot then diff --git a/lua/chatbot.lua b/lua/chatbot.lua index 5643a82..b54ccf6 100644 --- a/lua/chatbot.lua +++ b/lua/chatbot.lua @@ -211,13 +211,7 @@ function ChatBotInit() local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() buffer_sync_cursor[bufnr] = true - vim.wo.breakindent = true - vim.wo.wrap = true - vim.wo.linebreak = true vim.api.nvim_buf_set_option(bufnr, 'filetype', 'shellbot') - vim.api.nvim_buf_set_option(bufnr, 'buftype', 'nofile') - vim.api.nvim_buf_set_option(bufnr, 'buflisted', true) - vim.api.nvim_buf_set_option(bufnr, 'modified', false) add_transcript_header(winnr, bufnr, "USER", 0) 
local modes = { 'n', 'i' } for _, mode in ipairs(modes) do From 321ea3ed00b02e5fca5769cef9b427f3ed303f44 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:40:09 +0200 Subject: [PATCH 14/25] refactor: replace deprecated `nvim_buf_set_option()` call --- lua/chatbot.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lua/chatbot.lua b/lua/chatbot.lua index b54ccf6..1683510 100644 --- a/lua/chatbot.lua +++ b/lua/chatbot.lua @@ -211,7 +211,7 @@ function ChatBotInit() local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() buffer_sync_cursor[bufnr] = true - vim.api.nvim_buf_set_option(bufnr, 'filetype', 'shellbot') + vim.api.nvim_set_option_value('filetype', 'shellbot', { buf = bufnr }) add_transcript_header(winnr, bufnr, "USER", 0) local modes = { 'n', 'i' } for _, mode in ipairs(modes) do From c934b214da4af9aad54ab625461036940edae670 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 11 Oct 2024 23:43:45 +0200 Subject: [PATCH 15/25] refactor: move mappings into ftplugin --- ftplugin/shellbot.lua | 2 ++ lua/chatbot.lua | 7 ------- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/ftplugin/shellbot.lua b/ftplugin/shellbot.lua index cb0a397..875c549 100644 --- a/ftplugin/shellbot.lua +++ b/ftplugin/shellbot.lua @@ -13,4 +13,6 @@ vim.wo.wrap = true local has_shellbot = pcall(require, 'chatbot') if has_shellbot then vim.keymap.set({ 'i', 'n' }, '', ChatBotSubmit, { buffer = true }) + vim.keymap.set({ 'i', 'n' }, '', ChatBotSubmit, { buffer = true }) + vim.keymap.set({ 'i', 'n' }, '', ChatBotNewBuf, { buffer = true }) end diff --git a/lua/chatbot.lua b/lua/chatbot.lua index 1683510..0c6581d 100644 --- a/lua/chatbot.lua +++ b/lua/chatbot.lua @@ -213,13 +213,6 @@ function ChatBotInit() buffer_sync_cursor[bufnr] = true vim.api.nvim_set_option_value('filetype', 'shellbot', { buf = bufnr }) add_transcript_header(winnr, bufnr, "USER", 0) - local modes = { 'n', 'i' } - for _, mode in 
ipairs(modes) do - vim.api.nvim_buf_set_keymap(bufnr, mode, '', ':lua ChatBotSubmit()', - { noremap = true, silent = true }) - vim.api.nvim_buf_set_keymap(bufnr, mode, '', ':lua ChatBotNewBuf()', - { noremap = true, silent = true }) - end end function M.chatbot() From 7a92356640636ad5312f16c3cca84d32bf4b4b95 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Wed, 20 Nov 2024 07:24:33 -0500 Subject: [PATCH 16/25] feat: add ability to set model at runtime via env var - ANTHROPIC_MODEL for Anthropic - OPENAI_MODEL for ChatGPT --- src/anthropic.rs | 8 ++++++-- src/api.rs | 16 ++++++++++------ src/main.rs | 12 ++++++++++-- src/openai.rs | 8 ++++++-- 4 files changed, 32 insertions(+), 12 deletions(-) diff --git a/src/anthropic.rs b/src/anthropic.rs index b3fe064..3556272 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -6,7 +6,7 @@ use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; const MODEL: &str = "claude-3-opus-20240229"; -pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { +pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages"; let mut headers = HeaderMap::new(); @@ -28,7 +28,11 @@ pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { ); let request = RequestJSON { - model: MODEL.to_string(), + model: if model.is_empty() { + MODEL.to_string() + } else { + model.to_string() + }, system: request.system_prompt, messages: request.transcript, stream: true, diff --git a/src/api.rs b/src/api.rs index c0ba805..b520a9b 100644 --- a/src/api.rs +++ b/src/api.rs @@ -11,14 +11,18 @@ use crate::sse::SSEConverter; use crate::sse::SSEvent; pub enum ApiProvider { - OpenAI(String), - Anthropic(String), + OpenAI(String, String), + Anthropic(String, String), } pub fn stream_response<'a>(provider: ApiProvider, request: ChatRequest) -> Receiver { let request = match provider { - 
ApiProvider::OpenAI(ref api_key) => openai::get_request(&api_key, request), - ApiProvider::Anthropic(ref api_key) => anthropic::get_request(&api_key, request), + ApiProvider::OpenAI(ref api_key, ref model) => { + openai::get_request(&api_key, &model, request) + } + ApiProvider::Anthropic(ref api_key, ref model) => { + anthropic::get_request(&api_key, &model, request) + } }; let (sender, receiver) = mpsc::channel(100); tokio::spawn(async move { send_response(&provider, request, sender).await }); @@ -98,8 +102,8 @@ fn convert_chunk(chunk: Bytes) -> String { fn process_sse(provider: &ApiProvider, event: SSEvent) -> Option { match provider { - ApiProvider::Anthropic(_) => anthropic::convert_sse(event), - ApiProvider::OpenAI(_) => openai::convert_sse(event), + ApiProvider::Anthropic(_, _) => anthropic::convert_sse(event), + ApiProvider::OpenAI(_, _) => openai::convert_sse(event), } } diff --git a/src/main.rs b/src/main.rs index e49f564..d621a31 100644 --- a/src/main.rs +++ b/src/main.rs @@ -38,8 +38,16 @@ async fn main() { } let request = structure_input(); let provider = std::env::var("ANTHROPIC_API_KEY") - .map(ApiProvider::Anthropic) - .or_else(|_| std::env::var("OPENAI_API_KEY").map(ApiProvider::OpenAI)) + .map(|key| { + let model = std::env::var("ANTHROPIC_MODEL").unwrap_or_default(); + ApiProvider::Anthropic(key, model) + }) + .or_else(|_| { + std::env::var("OPENAI_API_KEY").map(|key| { + let model = std::env::var("OPENAI_MODEL").unwrap_or_default(); + ApiProvider::OpenAI(key, model) + }) + }) .unwrap_or_else(|_| panic!("No API key provided")); let mut receiver = stream_response(provider, request); diff --git a/src/openai.rs b/src/openai.rs index 4f8787a..545b33e 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -5,7 +5,7 @@ use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; const MODEL: &str = "gpt-4o"; -pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { +pub fn get_request(api_key: &str, model: &str, request: 
ChatRequest) -> RequestBuilder { let mut messages = vec![ChatMessage { role: ChatRole::System, content: request.system_prompt, @@ -23,7 +23,11 @@ pub fn get_request(api_key: &str, request: ChatRequest) -> RequestBuilder { HeaderValue::from_str(&format!("Bearer {}", api_key)).unwrap(), ); let request = RequestJSON { - model: MODEL.to_string(), + model: if model.is_empty() { + MODEL.to_string() + } else { + model.to_string() + }, stream: true, messages, }; From f848d28f2f5ab74d0b6f90d7835aad92f173e699 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Wed, 20 Nov 2024 07:46:01 -0500 Subject: [PATCH 17/25] fix: suppress system prompt for "o1-preview" and "o1-mini" models --- src/openai.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/openai.rs b/src/openai.rs index 545b33e..6a38053 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -6,10 +6,13 @@ use serde::{Deserialize, Serialize}; const MODEL: &str = "gpt-4o"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { - let mut messages = vec![ChatMessage { - role: ChatRole::System, - content: request.system_prompt, - }]; + let mut messages = vec![]; + if model != "o1-preview" && model != "o1-mini" { + messages.push(ChatMessage { + role: ChatRole::System, + content: request.system_prompt, + }); + } messages.extend_from_slice(&request.transcript); let client = Client::new(); let url = "https://api.openai.com/v1/chat/completions"; From f63da96552b72a69e80764cbe84d791a63147474 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 6 Jan 2025 19:09:31 +0100 Subject: [PATCH 18/25] refactor!: rename `:ChatGPT` to `:Shellbot` Seeing as ChatGPT isn't even the default LLM (Anthropic is used preferentially). 
--- README.md | 8 ++++---- plugin/shellbot.lua | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index aaca199..f05b5db 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# Streaming ChatGPT for Shell and Neovim +# Streaming LLM for Shell and Neovim -AI assistants are transformational for programmers. However, ChatGPT 4 is also relatively slow. Streaming its responses greatly improves the user experience. These utilities attempts to bring these tools closer to the command-line and editor while preserving streaming. There are three parts here: +AI assistants are transformational for programmers. However, models like ChatGPT 4 are also relatively slow. Streaming their responses greatly improves the user experience. These utilities attempts to bring these tools closer to the command-line and editor while preserving streaming. There are three parts here: 1. A Rust binary that streams completion responses to stdin 2. A shell script that builds a little REPL over that binary @@ -70,9 +70,9 @@ After installation, run `:!cargo build` in the plugin directory. ## Commands -### `:ChatGPT` +### `:Shellbot` -The plugin defines a `:ChatGPT` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. +The plugin defines a `:Shellbot` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. This plugin is optimized to allow for streaming. It attempts to keep new input in view by repositioning the cursor at the end of the buffer as new text is appended. The plugin takes care to work in the case that the user switches away from the window where the response is coming in. To turn off the cursor movement while a response is streaming, hit "Enter" or "Space." This will free the cursor for the rest of the response. 
diff --git a/plugin/shellbot.lua b/plugin/shellbot.lua index 74b449c..28b7a47 100644 --- a/plugin/shellbot.lua +++ b/plugin/shellbot.lua @@ -1,4 +1,4 @@ -vim.api.nvim_create_user_command('ChatGPT', function() +vim.api.nvim_create_user_command('Shellbot', function() local shellbot = require('chatbot') local env = vim.env['SHELLBOT'] if env ~= nil then From c9f2d7165d67e724ca531089e8f880a963c2e55f Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 6 Jan 2025 19:11:01 +0100 Subject: [PATCH 19/25] chore: default Anthropic to "claude-3-5-sonnet-20241022" Model is faster and cheaper. If Opus ends up getting bumped to 3.5 it may be worth updating the default to that later on. --- src/anthropic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/anthropic.rs b/src/anthropic.rs index 3556272..674c797 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -5,7 +5,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "claude-3-opus-20240229"; +const MODEL: &str = "claude-3-5-sonnet-20241022"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages"; From eb6d184573beb2cd9fbd4f1097bcd4c34dd64f5f Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 6 Jan 2025 19:20:34 +0100 Subject: [PATCH 20/25] docs: document new environment variables --- README.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f05b5db..8002cee 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,11 @@ The Rust program can be built with `cargo build`. It expects an `OPENAI_API_KEY` ===USER=== ``` -If a transcript does not start with a System section, then the default System prompt is used. +If a transcript does not start with a System section, then the default System prompt is used. 
The default prompt can be customized with contents from a file passed as a first argument to the executable. + +To override the default Anthropic model (`claude-3-5-sonnet-20241022`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. + +To override the default OpenAI model (`gpt-4o`), set the `OPENAPI_MODEL` environment variable to the desired value. ## Installation @@ -68,11 +72,11 @@ Plugin 'wolffiex/shellbot' After installation, run `:!cargo build` in the plugin directory. -## Commands +## Neovim commands ### `:Shellbot` -The plugin defines a `:Shellbot` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the rust binary built in the step above. +The plugin defines a `:Shellbot` command that locates the Rust binary through the `SHELLBOT` environment variable. This should be set to the absolute path of the Rust binary built in the step above. This plugin is optimized to allow for streaming. It attempts to keep new input in view by repositioning the cursor at the end of the buffer as new text is appended. The plugin takes care to work in the case that the user switches away from the window where the response is coming in. To turn off the cursor movement while a response is streaming, hit "Enter" or "Space." This will free the cursor for the rest of the response. From 78a6114e64372356ad2d076615a177317848165e Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 24 Feb 2025 21:57:57 +0100 Subject: [PATCH 21/25] chore: bump default Anthropic model to claude-3-7-sonnet-20250219 The new hotness: - https://www.anthropic.com/news/claude-3-7-sonnet --- README.md | 2 +- src/anthropic.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 8002cee..469ef68 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ The Rust program can be built with `cargo build`. 
It expects an `OPENAI_API_KEY` If a transcript does not start with a System section, then the default System prompt is used. The default prompt can be customized with contents from a file passed as a first argument to the executable. -To override the default Anthropic model (`claude-3-5-sonnet-20241022`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. +To override the default Anthropic model (`claude-3-7-sonnet-20250219`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. To override the default OpenAI model (`gpt-4o`), set the `OPENAPI_MODEL` environment variable to the desired value. diff --git a/src/anthropic.rs b/src/anthropic.rs index 674c797..bf5a0e5 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -5,7 +5,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "claude-3-5-sonnet-20241022"; +const MODEL: &str = "claude-3-7-sonnet-20250219"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages"; From 4f06149512b2c72bbe7697c42addbb2a2f7bfc0e Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Tue, 29 Apr 2025 13:21:26 +0200 Subject: [PATCH 22/25] feat: teach `chatbot()` to take an `env` table This allows you to easily set up multiple commands that can be used to interact with different models/APIs. For example, I have something like this in my dotfiles, setting up `:ChatGPT`, `:Claude`, `:Opus` etc: ```lua local has_shellbot, shellbot = pcall(require, 'chatbot') if has_shellbot then -- Set up wrapper commands for specifically targeting ChatGPT, Claude (etc).
local function get_executable() local executable = vim.fn.split((vim.env['SHELLBOT'] or '/dev/null'), ' ')[1] if executable and vim.fn.executable(executable) == 1 then return executable else vim.api.nvim_echo( { { 'error: $SHELLBOT does not appear to be executable', 'ErrorMsg', } }, true, {} ) end end vim.api.nvim_create_user_command('ChatGPT', function() local executable = get_executable() if executable then shellbot.chatbot({ OPENAI_API_KEY = vim.env.OPENAI_API_KEY, }) end end, {}) vim.api.nvim_create_user_command('ChatGPTX', function() local executable = get_executable() if executable then shellbot.chatbot({ OPENAI_API_KEY = vim.env.OPENAI_API_KEY, OPENAI_MODEL = 'o1-mini', }) end end, {}) vim.api.nvim_create_user_command('Claude', function() local executable = get_executable() if executable then shellbot.chatbot({ ANTHROPIC_API_KEY = vim.env.ANTHROPIC_API_KEY, }) end end, {}) vim.api.nvim_create_user_command('Opus', function() local executable = get_executable() if executable then shellbot.chatbot({ ANTHROPIC_API_KEY = vim.env.ANTHROPIC_API_KEY, ANTHROPIC_MODEL = 'claude-3-opus-20240229', }) end end, {}) end ``` --- lua/chatbot.lua | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/lua/chatbot.lua b/lua/chatbot.lua index 0c6581d..1021c8e 100644 --- a/lua/chatbot.lua +++ b/lua/chatbot.lua @@ -10,6 +10,8 @@ local roles = { ASSISTANT = nbsp .. "🤖 «vimbot»" .. nbsp, } +local buffer_env = {} + local buffer_sync_cursor = {} function ChatBotCancelCursorSync() local bufnr = vim.api.nvim_get_current_buf() @@ -44,6 +46,10 @@ function ChatBotSubmit() vim.cmd("normal! 
Go") local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() + local env = buffer_env[bufnr] and vim.tbl_extend('keep', buffer_env[bufnr], { + SHELLBOT_LOG_FILE = vim.env['SHELLBOT_LOG_FILE'], + }) + local clear_env = not not env buffer_sync_cursor[bufnr] = true local function receive_stream(_, data, _) if #data > 1 or data[1] ~= '' then @@ -113,6 +119,8 @@ function ChatBotSubmit() local output = {} local job_id = vim.fn.jobstart(bot_cmd, { + clear_env = clear_env, + env = env, on_stdout = function(_, data, _) if data[1] ~= "" then table.insert(output, data[1]) @@ -155,6 +163,8 @@ function ChatBotSubmit() end local job_id = vim.fn.jobstart(bot_cmd, { + clear_env = clear_env, + env = env, on_stdout = receive_stream, on_exit = stream_done, on_stderr = function(_, data, _) @@ -203,27 +213,25 @@ function ChatBotSubmit() end function ChatBotNewBuf() + local bufnr = vim.api.nvim_get_current_buf() vim.cmd("enew") - ChatBotInit() + ChatBotInit(buffer_env[bufnr]) end -function ChatBotInit() +function ChatBotInit(env) local winnr = vim.api.nvim_get_current_win() local bufnr = vim.api.nvim_get_current_buf() + buffer_env[bufnr] = env buffer_sync_cursor[bufnr] = true vim.api.nvim_set_option_value('filetype', 'shellbot', { buf = bufnr }) add_transcript_header(winnr, bufnr, "USER", 0) end -function M.chatbot() +function M.chatbot(env) vim.cmd("botright vnew") vim.cmd("set winfixwidth") vim.cmd("vertical resize 60") - ChatBotInit() -end - -function M.chatbot_init() - ChatBotInit() + ChatBotInit(env) end function ChatBotCancelResponse() From 84b1c35be8474d1f327f3cd1caa13c297419fcbe Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Mon, 12 May 2025 11:05:58 +0200 Subject: [PATCH 23/25] chore: bump OpenAI model to GPT-4.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit > GPT-4.1 is our flagship model for complex tasks. It is well suited for > problem solving across domains. 
https://platform.openai.com/docs/models/gpt-4.1 Marketing spiel: > Alongside 4.1 mini and 4.1 nano, it makes up our latest flagship GPT > family, specifically trained for developer use-cases like coding, > instruction-following, and function-calling. > > GPT-4.1 represents a significant upgrade over GPT-4o, particularly for > real-world software engineering tasks—it achieves 55% on SWE-bench > Verified, compared to GPT-4o’s 33%. It’s notably more skilled at > frontend coding, providing clean diffs, structured responses, reliable > tool usage, and more. > > Get started with GPT-4.1 > > • Largest context window: GPT-4.1 offers the largest context window > of our models—supporting over 1 million tokens—and is able to > better use that context with improved long-context comprehension. > • Lower costs and latency: GPT-4.1 pushes model performance > forward at every point on the latency curve. These new models are > 26% cheaper than GPT-4o on median queries, and nano is our fastest > and cheapest model ever. And, there's no extra charge for using long > context—it’s just the normal token prices. > • Instruction following: GPT-4.1 follows instructions more > reliably. On internal evals, GPT-4.1 outranks GPT-4o on tasks like > format adherence, complying with negative instructions, and ordering. > > Thomson Reuters tested GPT‑4.1 with CoCounsel, their > professional-grade AI assistant for legal work. They reported a > 17% improvement in multi-document review accuracy compared to > GPT‑4o on internal long-context benchmarks. Carlyle used GPT‑4.1 > to accurately extract granular financial data from multiple, > lengthy documents—including PDFs, Excel files, and other complex > formats—achieving a 50% improvement on retrieval tasks involving > large, data-dense documents. > > I’ve seen many developers adopt GPT-4.1 recently and recognize its > coding capabilities, instruction-following, speed, and price, so I > hope you’ll get a chance to try them out too!
And as always, please > let us know what you think. --- src/openai.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/openai.rs b/src/openai.rs index 6a38053..1d45a23 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -4,7 +4,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "gpt-4o"; +const MODEL: &str = "gpt-4.1"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let mut messages = vec![]; if model != "o1-preview" && model != "o1-mini" { From f8470e5718188078fef50a48846e85fc50b3c3cf Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Tue, 30 Sep 2025 00:35:58 +0200 Subject: [PATCH 24/25] chore: bump models to latest Anthropic goes to claude-sonnet-4-5-20250929. OpenAI goes to gpt-5. --- src/anthropic.rs | 2 +- src/openai.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/anthropic.rs b/src/anthropic.rs index bf5a0e5..9c217de 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -5,7 +5,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "claude-3-7-sonnet-20250219"; +const MODEL: &str = "claude-sonnet-4-5-20250929"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages"; diff --git a/src/openai.rs b/src/openai.rs index 1d45a23..3ba0081 100644 --- a/src/openai.rs +++ b/src/openai.rs @@ -4,7 +4,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "gpt-4.1"; +const MODEL: &str = "gpt-5"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let mut messages = vec![]; if model != "o1-preview" && model != "o1-mini" { From 
9164ff438ae48d024d7680b5503ed001bcf59686 Mon Sep 17 00:00:00 2001 From: Greg Hurrell Date: Fri, 6 Feb 2026 10:46:41 +0100 Subject: [PATCH 25/25] chore: bump ANTHROPIC_MODEL default from old Sonnet to new Opus Using alias documented here: - https://platform.claude.com/docs/en/about-claude/models/overview --- README.md | 4 ++-- src/anthropic.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 469ef68..7ae2911 100644 --- a/README.md +++ b/README.md @@ -19,9 +19,9 @@ The Rust program can be built with `cargo build`. It expects an `OPENAI_API_KEY` If a transcript does not start with a System section, then the default System prompt is used. The default prompt can be customized with contents from a file passed as a first argument to the executable. -To override the default Anthropic model (`claude-3-7-sonnet-20250219`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. +To override the default Anthropic model (`claude-opus-4-6`), specify the desired model via the `ANTHROPIC_MODEL` environment variable. -To override the default OpenAI model (`gpt-4o`), set the `OPENAPI_MODEL` environment variable to the desired value. +To override the default OpenAI model (`gpt-5`), set the `OPENAI_MODEL` environment variable to the desired value. ## Installation diff --git a/src/anthropic.rs b/src/anthropic.rs index 9c217de..ace8487 100644 --- a/src/anthropic.rs +++ b/src/anthropic.rs @@ -5,7 +5,7 @@ use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, RequestBuilder}; use serde::{Deserialize, Serialize}; -const MODEL: &str = "claude-sonnet-4-5-20250929"; +const MODEL: &str = "claude-opus-4-6"; pub fn get_request(api_key: &str, model: &str, request: ChatRequest) -> RequestBuilder { let client = Client::new(); let url = "https://api.anthropic.com/v1/messages";