README.md (13 changes: 12 additions & 1 deletion)
@@ -33,7 +33,18 @@ Features may be incomplete, bugs are likely to occur and breaking changes may oc

# Configuration

no configuration currently.
You can configure the plugin to use OpenRouter (an OpenAI-compatible proxy) by passing options to `setup`:

```lua
require('nes').setup({
    provider = 'openrouter', -- 'copilot' (default) or 'openrouter'
    api_key = 'YOUR_OPENROUTER_API_KEY', -- required for openrouter
    base_url = 'https://api.openrouter.ai/v1', -- optional, default "https://api.openrouter.ai/v1"
    model = 'gpt-3.5-turbo', -- optional, default "gpt-3.5-turbo" for openrouter, "copilot-nes-v" for copilot
Comment on lines +42 to +43

medium

The documentation for `base_url` is inconsistent with the implementation. The example value and the default mentioned in the comment (`https://api.openrouter.ai/v1`) point to a base path, but the code expects the full endpoint URL for chat completions. The actual default in the code is `https://openrouter.ai/api/v1/chat/completions`.

This discrepancy will likely confuse users and lead to incorrect configurations. Please update the documentation to reflect the correct default value and clarify that the full endpoint URL is expected.

Suggested change
base_url = 'https://api.openrouter.ai/v1', -- optional, default "https://api.openrouter.ai/v1"
model = 'gpt-3.5-turbo', -- optional, default "gpt-3.5-turbo" for openrouter, "copilot-nes-v" for copilot
base_url = 'https://openrouter.ai/api/v1/chat/completions', -- optional, default "https://openrouter.ai/api/v1/chat/completions"
model = 'gpt-3.5-turbo', -- optional, default "gpt-3.5-turbo" for openrouter, "copilot-nes-v" for copilot

})
```

Leave `provider = 'copilot'` (or omit `provider`) to use GitHub Copilot.
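Based on the fallbacks in `lua/nes/api.lua`, the OpenRouter key and endpoint can also be supplied through the environment (`OPENROUTER_API_KEY` and, optionally, `OPENROUTER_API_BASE_URL`) rather than written into your config. A minimal sketch, assuming the variables are exported before Neovim starts:

```lua
-- Minimal sketch: omit api_key/base_url and rely on the environment
-- fallbacks read in lua/nes/api.lua ($OPENROUTER_API_KEY and, optionally,
-- $OPENROUTER_API_BASE_URL).
require('nes').setup({
    provider = 'openrouter',
})
```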

# Usage

lua/nes/api.lua (60 changes: 60 additions & 0 deletions)
@@ -62,6 +62,66 @@ local function get_api_token()
end

function M.call(payload, callback)
	local pkg = require("nes")
	local opts = pkg.opts or {}
	if opts.provider == "openrouter" then
		-- Resolve the key from setup() opts, falling back to the environment.
		local api_key = opts.api_key or vim.env.OPENROUTER_API_KEY
		if not api_key then
			error("OpenRouter API key not found")
		end
		local base_url = opts.base_url
			or vim.env.OPENROUTER_API_BASE_URL
			or "https://openrouter.ai/api/v1/chat/completions"
		-- Translate the Copilot-style payload into an OpenAI-compatible request.
		local open_payload = {
			model = opts.model or "gpt-3.5-turbo",
			messages = payload.messages,
			temperature = payload.temperature,
			top_p = payload.top_p,
			n = payload.n,
			stream = true,
		}
		local output = ""

		curl.post(base_url, {
			headers = {
				["Content-Type"] = "application/json",
				["Authorization"] = "Bearer " .. api_key,
			},
			on_error = function(err)
				error("openrouter request error: " .. err)
			end,
			body = vim.json.encode(open_payload),
			-- Accumulate streamed SSE deltas into a single response string.
			stream = function(_, chunk)
				if not chunk then
					return
				end
				if vim.startswith(chunk, "data: ") then
					chunk = chunk:sub(7) -- drop the 6-character "data: " prefix
				end
				if chunk == "[DONE]" then
					return
				end
				local ok, event = pcall(vim.json.decode, chunk)
				if not ok then
					return
				end
				if event and event.choices and event.choices[1] then
					local choice = event.choices[1]
					if choice.delta and choice.delta.content then
						output = output .. choice.delta.content
					end
				end
			end,
Comment on lines +95 to +115

high

The current stream processing logic may not be robust enough to handle cases where multiple Server-Sent Events (SSE) arrive in a single data chunk. The implementation assumes one `data:` line per chunk, which could lead to JSON decoding errors and incomplete responses if the provider sends multiple events together.

I recommend iterating over each line in the chunk to process each event separately. This will make the streaming logic more resilient.

stream = function(_, chunk)
  if not chunk then
    return
  end
  for line in chunk:gmatch("[^\r\n]+") do
    if vim.startswith(line, "data: ") then
      local data = line:sub(7)
      if data == "[DONE]" then
        return
      end
      local ok, event = pcall(vim.json.decode, data)
      if ok and event and event.choices and event.choices[1] then
        local choice = event.choices[1]
        if choice.delta and choice.delta.content then
          output = output .. choice.delta.content
        end
      end
    end
  end
end
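To make the failure mode concrete, here is a hypothetical chunk (illustrative values, not a captured OpenRouter response) in which two SSE events arrive in a single read; the per-line loop above recovers both deltas, while a single `vim.json.decode` over the whole chunk would fail:

```lua
-- Hypothetical chunk: two SSE events delivered in one read.
local chunk = 'data: {"choices":[{"delta":{"content":"Hel"}}]}\n'
	.. 'data: {"choices":[{"delta":{"content":"lo"}}]}'

local output = ""
for line in chunk:gmatch("[^\r\n]+") do
	if vim.startswith(line, "data: ") then
		local data = line:sub(7)
		if data ~= "[DONE]" then
			local ok, event = pcall(vim.json.decode, data)
			if ok and event.choices and event.choices[1].delta then
				output = output .. (event.choices[1].delta.content or "")
			end
		end
	end
end
assert(output == "Hello")
```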

			callback = function()
				callback(output)
				-- __AUTO_GENERATED_PRINT_VAR_START__
				print([==[M.call#if#callback output:]==], vim.inspect(output)) -- __AUTO_GENERATED_PRINT_VAR_END__
Comment on lines +118 to +119

medium

It appears a debug print statement was left in the code. This should be removed before merging to avoid polluting the user's output.

			end,
		})
		return
	end

	local api_token = get_api_token()
	local base_url = api_token.endpoints.proxy or api_token.endpoints.api

lua/nes/context.lua (25 changes: 25 additions & 0 deletions)
@@ -90,6 +90,31 @@ function Context.new(bufnr)
	return self
end

---@class CurrentVersion
---@field text string

---@class Context
---@field filename string
---@field original_code string
---@field edits string
---@field filetype string
---@field current_version CurrentVersion

---@class Message
---@field role string
---@field content string

---@class Payload
---@field messages Message[]
---@field model string
---@field temperature number
---@field top_p number
---@field prediction {type: string, content: string}
---@field n number
---@field stream boolean
---@field snippy {enabled: boolean}

---@return Payload
function Context:payload()
	-- copy from vscode
	return {
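As a rough illustration of the `Payload` shape documented above (every value below is a placeholder, not the actual defaults assembled by `Context:payload()`):

```lua
---@type Payload
local example_payload = {
	messages = {
		{ role = "system", content = "..." },
		{ role = "user", content = "..." },
	},
	model = "copilot-nes-v", -- default model for the copilot provider per the README
	temperature = 0, -- placeholder; real value comes from Context:payload()
	top_p = 1, -- placeholder
	prediction = { type = "content", content = "..." },
	n = 1, -- placeholder
	stream = true,
	snippy = { enabled = false }, -- placeholder
}
```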
lua/nes/core.lua (2 changes: 2 additions & 0 deletions)
@@ -279,6 +279,8 @@ function M.get_suggestion(bufnr)
	local ctx = Context.new(bufnr)
	local payload = ctx:payload()
	require("nes.api").call(payload, function(stdout)
		-- __AUTO_GENERATED_PRINT_VAR_START__
		print([==[M.get_suggestion#(anon) stdout:]==], vim.inspect(stdout)) -- __AUTO_GENERATED_PRINT_VAR_END__
Comment on lines +282 to +283

medium

This appears to be a leftover debug print statement. It should be removed before this branch is merged.

		local next_version = vim.trim(stdout)
		assert(next_version)
		if not vim.startswith(next_version, "<next-version>") then
lua/nes/init.lua (2 changes: 2 additions & 0 deletions)
@@ -1,7 +1,9 @@
local M = {}
M.opts = {}

function M.setup(opts)
	opts = opts or {}
	M.opts = opts

	vim.api.nvim_set_hl(0, "NesAdd", { link = "DiffAdd", default = true })
	vim.api.nvim_set_hl(0, "NesDelete", { link = "DiffDelete", default = true })
samplecode.go (39 changes: 39 additions & 0 deletions)
@@ -0,0 +1,39 @@
package main

import (
	"flag"
	"fmt"
	"math"

	"log/slog"
)

// Args holds command-line options parsed at startup.
var Args struct {
	logLevel string
}

func init() {
	flag.StringVar(&Args.logLevel, "log-level", "info", "Set the log level")
	flag.Parse()
}

// Point is a 2D point with integer coordinates.
type Point struct {
	x int
	y int
}

// NewPoint constructs a Point from x and y coordinates.
func NewPoint(x, y int) Point {
	return Point{x: x, y: y}
}

// Distance returns the Euclidean distance between p and other.
func (p *Point) Distance(other Point) float64 {
	return math.Sqrt(float64((p.x-other.x)*(p.x-other.x) + (p.y-other.y)*(p.y-other.y)))
}

// String implements fmt.Stringer.
func (p *Point) String() string {
	return fmt.Sprintf("Point(%d, %d)", p.x, p.y)
}

func main() {
	slog.Info("Starting application", "logLevel", Args.logLevel)
}