Commit d6bb6e7

Merge pull request oobabooga#5549 from oobabooga/dev
Merge dev branch
2 parents 7838075 + 10df23e

13 files changed: +109 -71 lines

css/main.css (+22)
@@ -341,6 +341,7 @@ div.svelte-362y77>*, div.svelte-362y77>.form>* {
   overflow-wrap: anywhere;
   border-top: none;
   border-radius: 0 0 0 8px;
+  visibility: visible;
 }
 
 .chat-parent {
@@ -350,6 +351,18 @@ div.svelte-362y77>*, div.svelte-362y77>.form>* {
   margin-bottom: var(--input-delta) !important;
 }
 
+/* On desktop, automatically hide the chat scroll bar
+ * when not hovered. */
+@media (hover: hover) and (pointer: fine) {
+  .chat-parent {
+    visibility: hidden;
+  }
+
+  .chat-parent:focus, .chat-parent:hover {
+    visibility: visible;
+  }
+}
+
 .old-ui .chat-parent {
   height: calc(100dvh - 192px - var(--header-height) - var(--input-delta));
   margin-bottom: var(--input-delta) !important;
@@ -676,6 +689,15 @@ div.svelte-362y77>*, div.svelte-362y77>.form>* {
   max-width: 300px;
   margin-left: calc(-0.5*(var(--document-width) - 880px - 14px - 16px * 2));
 }
+
+#chat-controls {
+  position: absolute;
+  top: 16px;
+  right: 0;
+  width: calc(0.5*(var(--document-width) - 880px - 120px - 16px*2));
+  max-width: 400px;
+  margin-right: calc(-0.5*(var(--document-width) - 880px - 14px - 16px * 2));
+}
 }
 
 /* ----------------------------------------------

extensions/openai/completions.py (-2)
@@ -297,8 +297,6 @@ def chat_streaming_chunk(content):
         resp_list: [{
             "index": 0,
             "finish_reason": None,
-            # So yeah... do both methods? delta and messages.
-            "message": {'role': 'assistant', 'content': content},
             "delta": {'role': 'assistant', 'content': content},
         }],
     }
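
Note: with the redundant "message" key gone, each streamed chunk now carries the text only in "delta", matching the OpenAI chat-completions streaming format. As a minimal sketch (the chunk dicts below are illustrative, not captured API output), a client reassembles the reply by concatenating the delta fragments:

# Reassemble a streamed reply from chunks that only carry "delta",
# the shape kept by this commit. Example data, not real API output.
chunks = [
    {"choices": [{"index": 0, "finish_reason": None,
                  "delta": {"role": "assistant", "content": "Hel"}}]},
    {"choices": [{"index": 0, "finish_reason": "stop",
                  "delta": {"role": "assistant", "content": "lo!"}}]},
]
reply = "".join(c["choices"][0]["delta"].get("content", "") for c in chunks)
print(reply)  # -> Hello!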

js/main.js (+17 -7)
@@ -377,36 +377,46 @@ function toggleBigPicture() {
 }
 
 //------------------------------------------------
-// Define global CSS properties for resizing and
-// positioning certain elements
+// Handle the chat input box growth
 //------------------------------------------------
 let currentChatInputHeight = 0;
 
+// Update chat layout based on chat and input dimensions
 function updateCssProperties() {
-  // Set the height of the chat area
   const chatContainer = document.getElementById("chat").parentNode.parentNode.parentNode;
   const chatInputHeight = document.querySelector("#chat-input textarea").clientHeight;
+
+  // Check if the chat container is visible
   if (chatContainer.clientHeight > 0) {
-    const newChatHeight = `${chatContainer.clientHeight - chatInputHeight + 40}px`;
+
+    // Calculate new chat height and adjust CSS properties
+    var numericHeight = chatContainer.parentNode.clientHeight - chatInputHeight + 40 - 100;
+    if (document.getElementById("chat-tab").style.paddingBottom != "") {
+      numericHeight += 20;
+    }
+    const newChatHeight = `${numericHeight}px`;
+
     document.documentElement.style.setProperty("--chat-height", newChatHeight);
     document.documentElement.style.setProperty("--input-delta", `${chatInputHeight - 40}px`);
 
-    // Set the position offset of the chat input box
+    // Get and set header height
     const header = document.querySelector(".header_bar");
    const headerHeight = `${header.clientHeight}px`;
     document.documentElement.style.setProperty("--header-height", headerHeight);
 
-    // Offset the scroll position of the chat area
+    // Adjust scrollTop based on input height change
     if (chatInputHeight !== currentChatInputHeight) {
-      chatContainer.scrollTop += chatInputHeight > currentChatInputHeight ? chatInputHeight : -chatInputHeight;
+      chatContainer.scrollTop += chatInputHeight > currentChatInputHeight ? chatInputHeight : -chatInputHeight + 40;
       currentChatInputHeight = chatInputHeight;
     }
   }
 }
 
+// Observe textarea size changes and call update function
 new ResizeObserver(updateCssProperties)
   .observe(document.querySelector("#chat-input textarea"));
 
+// Handle changes in window size
 window.addEventListener("resize", updateCssProperties);
 
 //------------------------------------------------

modules/chat.py (+5 -1)
@@ -82,7 +82,11 @@ def generate_chat_prompt(user_input, state, **kwargs):
     history = kwargs.get('history', state['history'])['internal']
 
     # Templates
-    chat_template = jinja_env.from_string(state['chat_template_str'])
+    chat_template_str = state['chat_template_str']
+    if state['mode'] != 'instruct':
+        chat_template_str = replace_character_names(chat_template_str, state['name1'], state['name2'])
+
+    chat_template = jinja_env.from_string(chat_template_str)
     instruction_template = jinja_env.from_string(state['instruction_template_str'])
     chat_renderer = partial(chat_template.render, add_generation_prompt=False, name1=state['name1'], name2=state['name2'])
     instruct_renderer = partial(instruction_template.render, add_generation_prompt=False)
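
Note: in chat and chat-instruct modes, the template string now has its character placeholders substituted before being compiled by Jinja2. The body of replace_character_names is not part of this diff; the sketch below is a hypothetical stand-in, assuming the usual {{user}}/{{char}} placeholders:

# Hypothetical stand-in for replace_character_names (its real body is
# not shown in this diff): swap character placeholders for the
# configured names before the template reaches Jinja2.
def replace_character_names(text: str, name1: str, name2: str) -> str:
    return text.replace('{{user}}', name1).replace('{{char}}', name2)

print(replace_character_names('{{user}} talks to {{char}}.', 'You', 'Assistant'))
# -> You talks to Assistant.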

modules/ui_chat.py (+9 -5)
@@ -77,12 +77,16 @@ def create_ui():
         shared.gradio['rename_to-confirm'] = gr.Button('Confirm', visible=False, elem_classes='refresh-button')
         shared.gradio['rename_to-cancel'] = gr.Button('Cancel', visible=False, elem_classes='refresh-button')
 
-    with gr.Row():
-        shared.gradio['start_with'] = gr.Textbox(label='Start reply with', placeholder='Sure thing!', value=shared.settings['start_with'])
+    with gr.Row(elem_id='chat-controls', elem_classes=['pretty_scrollbar']):
+        with gr.Column():
+            with gr.Row():
+                shared.gradio['start_with'] = gr.Textbox(label='Start reply with', placeholder='Sure thing!', value=shared.settings['start_with'], elem_classes=['add_scrollbar'])
 
-    with gr.Row():
-        shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value='chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under Parameters > Instruction template must match the current model.', elem_id='chat-mode')
-        shared.gradio['chat_style'] = gr.Dropdown(choices=utils.get_available_chat_styles(), label='Chat style', value=shared.settings['chat_style'], visible=shared.settings['mode'] != 'instruct')
+            with gr.Row():
+                shared.gradio['mode'] = gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value='chat', label='Mode', info='Defines how the chat prompt is generated. In instruct and chat-instruct modes, the instruction template selected under Parameters > Instruction template must match the current model.', elem_id='chat-mode')
+
+            with gr.Row():
+                shared.gradio['chat_style'] = gr.Dropdown(choices=utils.get_available_chat_styles(), label='Chat style', value=shared.settings['chat_style'], visible=shared.settings['mode'] != 'instruct')
 
 
 def create_chat_settings_ui():
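
Note: the controls are now grouped under a single #chat-controls row so the new CSS rule above can position the whole panel absolutely on wide screens. A minimal standalone sketch of the nesting (the dropdown choices here are placeholders, not the real chat styles):

import gradio as gr

# A Column stacks its children vertically; wrapping each control in
# its own Row keeps them on separate lines inside #chat-controls.
with gr.Blocks() as demo:
    with gr.Row(elem_id='chat-controls'):
        with gr.Column():
            with gr.Row():
                gr.Textbox(label='Start reply with', placeholder='Sure thing!')
            with gr.Row():
                gr.Radio(choices=['chat', 'chat-instruct', 'instruct'], value='chat', label='Mode')
            with gr.Row():
                gr.Dropdown(choices=['style-a', 'style-b'], label='Chat style')

demo.launch()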

requirements.txt (+12 -12)
@@ -28,22 +28,22 @@ bitsandbytes==0.42.*; platform_system != "Windows"
 https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.41.1-py3-none-win_amd64.whl; platform_system == "Windows"
 
 # llama-cpp-python (CPU only, AVX2)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 
 # llama-cpp-python (CUDA, no tensor cores)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.43+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.43+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.43+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.43+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 
 # llama-cpp-python (CUDA, tensor cores)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.43+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.43+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.43+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.43+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 
 # CUDA wheels
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
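
Note: every wheel URL above is gated by PEP 508 environment markers, so pip installs only the build matching the platform, architecture, and Python version; that is why the bump from 0.2.43 to 0.2.44 has to touch each variant. A small sketch of how such a marker is evaluated, using the packaging library that pip itself builds on:

from packaging.markers import Marker

# Evaluate one of the markers used above against the current
# interpreter; True means pip would consider that wheel line here.
marker = Marker('platform_system == "Linux" and platform_machine == "x86_64" '
                'and python_version == "3.11"')
print(marker.evaluate())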

requirements_amd.txt (+6 -6)
@@ -28,14 +28,14 @@ bitsandbytes==0.38.1; platform_system != "Windows"
 https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.38.1-py3-none-win_amd64.whl; platform_system == "Windows"
 
 # llama-cpp-python (CPU only, AVX2)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 
 # AMD wheels
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/rocm/llama_cpp_python_cuda-0.2.43+rocm5.6.1-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/rocm/llama_cpp_python_cuda-0.2.43+rocm5.6.1-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/rocm/llama_cpp_python_cuda-0.2.44+rocm5.6.1-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/rocm/llama_cpp_python_cuda-0.2.44+rocm5.6.1-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+rocm5.6-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"

requirements_amd_noavx2.txt (+4 -4)
@@ -28,10 +28,10 @@ bitsandbytes==0.38.1; platform_system != "Windows"
 https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.38.1-py3-none-win_amd64.whl; platform_system == "Windows"
 
 # llama-cpp-python (CPU only, no AVX2)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.43+cpuavx-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 
 # AMD wheels
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+rocm5.6-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
