Commit 41bc347

Improve local model configuration and documentation
1 parent e22c4ea commit 41bc347

File tree: 4 files changed, +96 −17 lines


README.md

Lines changed: 38 additions & 9 deletions
@@ -4,7 +4,8 @@
 
 A Neovim/Vim coding agent plugin. Neural integrates various machine learning
 tools so you can let AI write code for you in Neovim/Vim, among other helpful
-things. Use OpenAI's APIs made famous with ChatGPT, in Vim.
+things. Use OpenAI's APIs made famous with ChatGPT, in Vim, or other local
+models.
 
 ## 🌟 Features
 
@@ -20,8 +21,7 @@ Experience lightning-fast code generation and completion with asynchronous
 streaming.
 
 Edit any kind of text document. It can be used to generate Python docstrings,
-fix comments spelling/grammar mistakes, generate ideas and much more. See
-[examples from OpenAI](https://beta.openai.com/examples) for a start.
+fix comments spelling/grammar mistakes, generate ideas and much more.
 
 ## 🔌 Plugin Integrations
 
@@ -82,8 +82,21 @@ interact with. OpenAI is Neural's default data provider, and one of the easiest
 to configure.
 
 You will need to obtain an [OpenAI API key](https://beta.openai.com/signup/).
-Once you have your key, configure Neural to use that key, whether in a Vim
-script or in a Lua config.
+Once you have your key, configure Neural to use that key, whether in a Lua
+config or in Vimscript.
+
+```lua
+-- Configure Neural like so in Lua
+require('neural').setup({
+    providers = {
+        {
+            openai = {
+                api_key = vim.env.OPENAI_API_KEY,
+            },
+        },
+    },
+})
+```
 
 ```vim
 " Configure Neural like so in Vimscript
@@ -98,22 +111,38 @@ let g:neural = {
 \}
 ```
 
+Try typing `:Neural say hello`, and if all goes well the machine learning
+tool will say "hello" to you in the current buffer. Type `:help neural` to
+see the full documentation.
+
+You can configure the `url` for an OpenAI provider to run Neural with local
+models or other servers that offer an OpenAI compatible API, for example:
+
 ```lua
 -- Configure Neural like so in Lua
 require('neural').setup({
     providers = {
         {
             openai = {
-                api_key = vim.env.OPENAI_API_KEY,
+                url = 'http://localhost:7860',
             },
         },
     },
 })
 ```
 
-Try typing `:Neural say hello`, and if all goes well the machine learning
-tool will say "hello" to you in the current buffer. Type `:help neural` to
-see the full documentation.
+```vim
+" Configure Neural like so in Vimscript
+let g:neural = {
+\   'providers': [
+\       {
+\           'openai': {
+\               'url': 'http://localhost:7860',
+\           },
+\       },
+\   ],
+\}
+```
 
 ## 🛠️ Commands
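The README examples above point an OpenAI provider at `http://localhost:7860`. As a quick sanity check that a local server really does speak an OpenAI compatible API before configuring Neural, a sketch like the following can be used; the port mirrors the README example, and the `/v1/models` listing endpoint is an assumption about the server, not something this commit defines:

```python
# Illustrative sketch only -- not part of this commit.
# Assumptions: the server from the README example listens on localhost:7860
# and exposes the common OpenAI-compatible /v1/models listing endpoint.
import json
import urllib.request


def list_local_models(base_url: str = 'http://localhost:7860') -> list[str]:
    """Return the model IDs reported by an OpenAI-compatible server."""
    with urllib.request.urlopen(f'{base_url}/v1/models', timeout=5) as response:
        payload = json.load(response)

    # OpenAI-style listings wrap the models in a "data" array.
    return [model['id'] for model in payload.get('data', [])]


if __name__ == '__main__':
    print(list_local_models())
```

If this prints one or more model IDs, the same base URL can be used as the `url` value in the provider config shown above.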

doc/neural.txt

Lines changed: 9 additions & 0 deletions
@@ -367,6 +367,15 @@ use_chat_api *neural-provider-openai.use_chat_api*
   newer models may only run with the chat API.
 
 
+url                                               *neural-provider-openai.url*
+  Type: |String|
+  Default: `'https://api.openai.com'`
+
+  For configuring the API URL to send LLM requests to. This URL can be
+  configured to connect Neural to other models with OpenAI compatible APIs
+  such as DeepSeek, Qwen, etc.
+
+
 -------------------------------------------------------------------------------
 4.4 Highlights                                                *neural-highlights*

src/neural/provider/openai.py

Lines changed: 7 additions & 4 deletions
@@ -47,8 +47,11 @@ def get_openai_completion(
 ) -> None:
     headers = {
         "Content-Type": "application/json",
-        "Authorization": f"Bearer {config.api_key}",
     }
+
+    if config.api_key:
+        headers["Authorization"] = f"Bearer {config.api_key}"
+
     data: dict[str, Any] = {
         "model": config.model,
         "temperature": config.temperature,
@@ -137,8 +140,8 @@ def load_config(raw_config: dict[str, Any]) -> Config:
 
     api_key = raw_config.get('api_key')
 
-    if not isinstance(api_key, str) or not api_key:  # type: ignore
-        raise ValueError("api_key is not defined")
+    if not isinstance(api_key, str | None):  # type: ignore
+        raise ValueError(f"api_key is an invalid type: {type(api_key)}")
 
     model = raw_config.get('model')
 
@@ -191,7 +194,7 @@ def load_config(raw_config: dict[str, Any]) -> Config:
 
     return Config(
         url=url,
-        api_key=api_key,
+        api_key=api_key or '',
         model=model,
         use_chat_api=use_chat_api,
         temperature=temperature,
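The first hunk above is what allows unauthenticated local servers: the `Authorization` header is only attached when an `api_key` is configured, and `load_config` now accepts a missing key by normalising it to `''`. A minimal standalone sketch of that header behaviour (`build_headers` is an illustrative name, not part of Neural's provider code):

```python
# Illustrative sketch of the header behaviour introduced above;
# build_headers is a made-up name, not part of Neural's provider code.
def build_headers(api_key: str) -> dict[str, str]:
    headers = {"Content-Type": "application/json"}

    # Only authenticate when a key is configured, so local
    # OpenAI-compatible servers can be used without credentials.
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"

    return headers


assert build_headers("sk-fake")["Authorization"] == "Bearer sk-fake"
assert "Authorization" not in build_headers("")
```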

test/python/provider/test_openai.py

Lines changed: 42 additions & 4 deletions
@@ -11,9 +11,13 @@
 from neural.provider import openai
 
 
-def get_valid_config(model: str = "foo") -> dict[str, Any]:
+def get_valid_config(
+    model: str = "foo",
+    *,
+    api_key: str = '.',
+) -> dict[str, str | int]:
     return {
-        "api_key": ".",
+        "api_key": api_key,
         "model": model,
         "prompt": "say hello",
         "temperature": 1,
@@ -35,8 +39,7 @@ def test_load_config_errors() -> None:
     for modification, expected_error in [
         ({"url": 1}, "url must be a string"),
         ({"url": "x"}, "url must start with http(s)://"),
-        ({"url": "https://x", "api_key": ""}, "api_key is not defined"),
-        ({"api_key": "."}, "model is not defined"),
+        ({"url": "https://x"}, "model is not defined"),
         ({"model": ""}, "model is not defined"),
         (
             {"model": "x", "use_chat_api": 1},
@@ -126,6 +129,41 @@ def test_main_function_rate_other_error() -> None:
         openai.main()
 
 
+@pytest.mark.parametrize(['api_key', 'expected_headers'], [
+    pytest.param(
+        'sk-fake',
+        {
+            'Authorization': 'Bearer sk-fake',
+            'Content-type': 'application/json',
+        },
+        id='authenticated',
+    ),
+    pytest.param(
+        '',
+        {'Content-type': 'application/json'},
+        id='unauthenticated',
+    ),
+])
+def test_openai_authentication(
+    api_key: str,
+    expected_headers: dict[str, str],
+) -> None:
+    result_data = b'data: [DONE]\n\n'
+
+    with mock.patch.object(sys.stdin, 'readline') as readline_mock, \
+            mock.patch.object(urllib.request, 'urlopen') as urlopen_mock:
+
+        urlopen_mock.return_value.__enter__.return_value = BytesIO(result_data)
+
+        readline_mock.return_value = json.dumps({
+            "config": get_valid_config(api_key=api_key),
+            "prompt": "hello there",
+        })
+        openai.main()
+
+    assert urlopen_mock.mock_calls[0][1][0].headers == expected_headers
+
+
 def test_print_openai_completion_results() -> None:
     result_data = (
         b'data: {"id": "cmpl-6jMlRJtbYTGrNwE6Lxy1Ns1EtD0is", "object": "text_completion", "created": 1676270285, "choices": [{"text": "\\n", "index": 0, "logprobs": null, "finish_reason": null}], "model": "gpt-3.5-turbo-instruct"}\n'  # noqa
