@@ -16,27 +16,70 @@ const INTERVAL = 1000;
1616
1717export class CodestralCompleter implements IBaseCompleter {
1818 constructor ( options : BaseCompleter . IOptions ) {
19+ // this._requestCompletion = options.requestCompletion;
1920 this . _mistralProvider = new MistralAI ( { ...options . settings } ) ;
20- this . _throttler = new Throttler ( async ( data : CompletionRequest ) => {
21- const response = await this . _mistralProvider . completionWithRetry (
22- data ,
23- { } ,
24- false
25- ) ;
26- const items = response . choices . map ( ( choice : any ) => {
27- return { insertText : choice . message . content as string } ;
28- } ) ;
21+ this . _throttler = new Throttler (
22+ async ( data : CompletionRequest ) => {
23+ this . _invokedData = data ;
24+ let fetchAgain = false ;
2925
30- return {
31- items
32- } ;
33- } , INTERVAL ) ;
26+ // Request completion.
27+ const response = await this . _mistralProvider . completionWithRetry (
28+ data ,
29+ { } ,
30+ false
31+ ) ;
32+
33+ // Extract results of completion request.
34+ let items = response . choices . map ( ( choice : any ) => {
35+ return { insertText : choice . message . content as string } ;
36+ } ) ;
37+
38+ // Check if the prompt has changed during the request.
39+ if ( this . _invokedData . prompt !== this . _currentData ?. prompt ) {
40+ // The current prompt does not include the invoked one, the result is
41+ // cancelled and a new completion will be requested.
42+ if ( ! this . _currentData ?. prompt . startsWith ( this . _invokedData . prompt ) ) {
43+ fetchAgain = true ;
44+ items = [ ] ;
45+ } else {
46+ // Check if some results contain the current prompt, and return them if so,
47+ // otherwise request completion again.
48+ const newItems : { insertText : string } [ ] = [ ] ;
49+ items . forEach ( item => {
50+ const result = this . _invokedData ! . prompt + item . insertText ;
51+ if ( result . startsWith ( this . _currentData ! . prompt ) ) {
52+ const insertText = result . slice (
53+ this . _currentData ! . prompt . length
54+ ) ;
55+ newItems . push ( { insertText } ) ;
56+ }
57+ } ) ;
58+ if ( newItems . length ) {
59+ items = newItems ;
60+ } else {
61+ fetchAgain = true ;
62+ items = [ ] ;
63+ }
64+ }
65+ }
66+ return {
67+ items,
68+ fetchAgain
69+ } ;
70+ } ,
71+ { limit : INTERVAL }
72+ ) ;
3473 }
3574
3675 get provider ( ) : LLM {
3776 return this . _mistralProvider ;
3877 }
3978
79+ set requestCompletion ( value : ( ) => void ) {
80+ this . _requestCompletion = value ;
81+ }
82+
 // NOTE(review): the span below is a whitespace-mangled diff fragment, not
 // compilable source. The middle of fetch() — where `data` is built from
 // `request`/`context` — is elided by the hunk header and cannot be
 // reconstructed from this view; left byte-identical, comments only.
 4083 async fetch (
 4184 request : CompletionHandler . IRequest ,
 4285 context : IInlineCompletionContext
@@ -59,13 +102,23 @@ export class CodestralCompleter implements IBaseCompleter {
 59102 } ;
 60103
 61104 try {
 // Record the latest prompt so the throttled worker can detect staleness,
 // then await the (possibly coalesced) throttled completion.
 62- return this . _throttler . invoke ( data ) ;
 105+ this . _currentData = data ;
 106+ const completionResult = await this . _throttler . invoke ( data ) ;
 // Stale result: ask the host (if a callback was registered) to re-request.
 107+ if ( completionResult . fetchAgain ) {
 108+ if ( this . _requestCompletion ) {
 109+ this . _requestCompletion ( ) ;
 110+ }
 111+ }
 // Strip the internal `fetchAgain` flag before returning to the caller.
 112+ return { items : completionResult . items } ;
 63113 } catch ( error ) {
 // Best-effort: log and return no suggestions rather than surfacing the error.
 64114 console . error ( 'Error fetching completions' , error ) ;
 65115 return { items : [ ] } ;
 66116 }
 67117 }
68118
119+ private _requestCompletion ?: ( ) => void ;
69120 private _throttler : Throttler ;
70121 private _mistralProvider : MistralAI ;
122+ private _invokedData : CompletionRequest | null = null ;
123+ private _currentData : CompletionRequest | null = null ;
71124}
0 commit comments