Skip to content
Merged
Changes from 1 commit
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
5aaf247
llama : add infill sampler
ggerganov Oct 9, 2024
0566c69
llama.vim : neovim plugin
ggerganov Oct 9, 2024
0c649c8
llama.vim : fix suffix construction + fix virt text offset
ggerganov Oct 9, 2024
07e7dd4
llama.vim : handle space
ggerganov Oct 9, 2024
9d13e87
llama.vim : add processing info overlay
ggerganov Oct 9, 2024
6e82a03
llama.vim : display realtime [no ci]
ggerganov Oct 9, 2024
26a0c61
llama.vim : allow repeated suggestions [no ci]
ggerganov Oct 9, 2024
7e0b506
llama.vim : reduce scope of ids to local [no ci]
ggerganov Oct 9, 2024
41053f9
llama.vim : simplify init and cancel + auto-fim
ggerganov Oct 10, 2024
c507a65
llama.vim : async
ggerganov Oct 10, 2024
6669b55
llama.vim : set time limit for the generation phase
ggerganov Oct 10, 2024
2e8c350
llama.vim : fix edge cases
ggerganov Oct 10, 2024
4b1bd81
llama : simplify infill sampler
ggerganov Oct 10, 2024
865d9bc
llama : clean-up
ggerganov Oct 11, 2024
c9a46f4
llama.vim : minor [no ci]
ggerganov Oct 11, 2024
5624e91
llama.vim : fix docs [no ci]
ggerganov Oct 11, 2024
491f211
llama : improve infill sampler
ggerganov Oct 11, 2024
4f46e29
llama : print more info about control tokens
ggerganov Oct 13, 2024
b889022
llama.vim : add ring context from opened files and yanked text
ggerganov Oct 13, 2024
27bc11d
llama.vim : update server command [no ci]
ggerganov Oct 13, 2024
f794549
llama.vim : gather chunk on leaving buffer [no ci]
ggerganov Oct 13, 2024
27d53cb
llama.vim : logic to evict old chunks that are similar to new one
ggerganov Oct 13, 2024
d81a0ac
llama.vim : do not evict certain chunks [no ci]
ggerganov Oct 13, 2024
2960510
llama.vim : do not auto-fim when far from the end of the line [no ci]
ggerganov Oct 13, 2024
bc2857b
llama.vim : async context processing
ggerganov Oct 13, 2024
916c2ee
llama : simplify infill sampler
ggerganov Oct 13, 2024
ae76a09
llama.vim : pass filenames for each chunk
ggerganov Oct 13, 2024
9f8fa90
llama.vim : fix repetitions [no ci]
ggerganov Oct 13, 2024
25ecb35
llama.vim : simplify job logic + improve robustness and responsiveness
ggerganov Oct 14, 2024
e4be74b
llama.vim : add top_p + improve responsiveness + fix edge cases
ggerganov Oct 15, 2024
0c1f51b
llama : improve infill sampler
ggerganov Oct 15, 2024
42a9008
llama.vim : process extra chunks in the background [no ci]
ggerganov Oct 15, 2024
060573f
llama.vim : add comments [no ci]
ggerganov Oct 15, 2024
847c8c0
llama.vim : update infill API params [no ci]
ggerganov Oct 15, 2024
4583aef
llama.vim : final touches
ggerganov Oct 15, 2024
d1b8b21
llama.vim : fix repetitions of existing text
ggerganov Oct 17, 2024
1600d84
llama.vim : complete only within the local scope [no ci]
ggerganov Oct 17, 2024
6bb6e6d
llama.vim : display ring capacity [no ci]
ggerganov Oct 18, 2024
fe78c39
llama.vim : fix large chunk accept + comments [no ci]
ggerganov Oct 18, 2024
b8efb07
llama.vim : minor [no ci]
ggerganov Oct 18, 2024
32927e6
llama.vim : remove on-hold code + fixes [no ci]
ggerganov Oct 21, 2024
8fb5154
llama.vim : minor [no ci]
ggerganov Oct 21, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
llama.vim : remove on-hold code + fixes [no ci]
  • Loading branch information
ggerganov committed Oct 21, 2024
commit 32927e68b7fbfd6dfa82e531d186f1b6b22612ae
22 changes: 13 additions & 9 deletions examples/llama.vim
Original file line number Diff line number Diff line change
Expand Up @@ -131,17 +131,15 @@ function! llama#init()

augroup llama
autocmd!
autocmd InsertEnter * inoremap <expr> <silent> <C-F> llama#fim_inline(v:false, v:false)
autocmd InsertEnter * inoremap <expr> <silent> <C-F> llama#fim_inline(v:false)
autocmd InsertLeavePre * call llama#fim_cancel()

autocmd CursorMoved * call s:on_move()
autocmd CursorMovedI * call s:on_move()
autocmd CompleteChanged * call llama#fim_cancel()

if g:llama_config.auto_fim
autocmd InsertEnter * call llama#fim(v:true, v:false)
autocmd CursorMovedI * call llama#fim(v:true, v:false)
"autocmd CursorHoldI * call llama#fim(v:true, v:true)
autocmd CursorMovedI * call llama#fim(v:true)
endif

" gather chunks upon yanking
Expand Down Expand Up @@ -329,16 +327,17 @@ function! s:ring_update()
endfunction

" necessary for 'inoremap <expr>'
function! llama#fim_inline(is_auto, on_hold) abort
call llama#fim(a:is_auto, a:on_hold)
function! llama#fim_inline(is_auto) abort
    " Wrapper around llama#fim() suitable for use in an 'inoremap <expr>'
    " mapping: an <expr> mapping must evaluate to the text to insert, so we
    " trigger the FIM request as a side effect and return '' to insert nothing.
    " a:is_auto - presumably distinguishes automatic triggers (autocmds) from
    " the manual <C-F> mapping; semantics live in llama#fim() — confirm there.
    call llama#fim(a:is_auto)
    return ''
endfunction

" the main FIM call
" takes local context around the cursor and sends it together with the extra context to the server for completion
function! llama#fim(is_auto, on_hold) abort
function! llama#fim(is_auto) abort
" we already have a suggestion for the current cursor position
if a:on_hold && (s:hint_shown || (s:pos_x == col('.') - 1 && s:pos_y == line('.')))
if s:hint_shown && !a:is_auto
call llama#fim_cancel()
return
endif

Expand All @@ -352,7 +351,7 @@ function! llama#fim(is_auto, on_hold) abort
endif

let s:t_fim_start = reltime()
let s:timer_fim = timer_start(600, {-> llama#fim(v:true, v:true)})
let s:timer_fim = timer_start(600, {-> llama#fim(v:true)})
return
endif

Expand Down Expand Up @@ -512,6 +511,11 @@ function! s:fim_on_stdout(job_id, data, event) dict
return
endif

" show the suggestion only in insert mode
if mode() !=# 'i'
return
endif

let s:pos_x = self.pos_x
let s:pos_y = self.pos_y

Expand Down