@@ -301,10 +301,10 @@ def _raw_generate(
    # Store context in memory to use it for comparison with generated content
    utils.koboldai_vars.lastctx = decoded_prompt

-    self.input_queue.put({'command': 'generate', 'data': [(decoded_prompt, "", max_new, utils.koboldai_vars.max_length,
-        gen_settings.temp, int(gen_settings.top_k), gen_settings.top_a, gen_settings.top_p,
-        gen_settings.typical, gen_settings.tfs, gen_settings.rep_pen, gen_settings.rep_pen_range),
-        {"sampler_order": gen_settings.sampler_order, "use_default_badwordsids": utils.koboldai_vars.use_default_badwordsids}
+    self.input_queue.put({'command': 'generate', 'data': [(decoded_prompt,), {'max_length': max_new, 'max_context_length': utils.koboldai_vars.max_length,
+        'temperature': gen_settings.temp, 'top_k': int(gen_settings.top_k), 'top_a': gen_settings.top_a, 'top_p': gen_settings.top_p,
+        'typical_p': gen_settings.typical, 'tfs': gen_settings.tfs, 'rep_pen': gen_settings.rep_pen, 'rep_pen_range': gen_settings.rep_pen_range,
+        "sampler_order": gen_settings.sampler_order, "use_default_badwordsids": utils.koboldai_vars.use_default_badwordsids}
        ]})

    #genresult = koboldcpp.generate(decoded_prompt,"",max_new,utils.koboldai_vars.max_length,
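For context, a minimal sketch of how a consumer on the other end of `input_queue` could unpack the new payload shape, where the prompt stays positional and all sampler settings travel as keyword arguments. `handle_generate` and `fake_generate` are hypothetical names used for illustration only and are not part of this repository's worker code:

```python
from queue import Queue

def handle_generate(msg: dict, generate_fn) -> str:
    """Unpack the (args, kwargs) payload and forward it to the generator.

    The old payload packed every sampler setting positionally; the new one
    keeps only the prompt in the positional tuple and passes settings by name.
    """
    args, kwargs = msg['data']          # e.g. ((decoded_prompt,), {'max_length': ..., 'temperature': ...})
    return generate_fn(*args, **kwargs)

# Usage sketch with a stand-in for the real generator:
def fake_generate(prompt, **settings):
    return f"{prompt} [generated with {len(settings)} settings]"

q = Queue()
q.put({'command': 'generate', 'data': [('Once upon a time',), {'max_length': 80, 'temperature': 0.7}]})
msg = q.get()
if msg['command'] == 'generate':
    print(handle_generate(msg, fake_generate))
```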