Skip to content

Commit 23a8b79

Browse files
oobabooga and rivm469-dev
authored and committed
Merge pull request oobabooga#7366 from oobabooga/dev
Merge dev branch
2 parents a0b5599 + d79cdc6 commit 23a8b79

34 files changed

+488
-55
lines changed
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
## Chat Customization Diagnostics
2+
*WARNING: This file may contain sensitive information.*
3+
4+
**Custom Agents**<br>
5+
*1 file loaded*
6+
7+
.github/agents<br>
8+
.claude/agents<br>
9+
User Data<br>
10+
Extension: GitHub.copilot-chat<br>
11+
└─ [`Plan.agent.md`](../../../AppData/Roaming/Code/User/globalStorage/github.copilot-chat/plan-agent/Plan.agent.md)<br>
12+
13+
**Instructions**<br>
14+
15+
.github/instructions<br>
16+
.claude/rules<br>
17+
~/.claude/rules<br>
18+
User Data<br>
19+
20+
21+
**Prompt Files**<br>
22+
*3 files loaded*
23+
24+
.github/prompts<br>
25+
User Data<br>
26+
Extension: GitHub.copilot-chat<br>
27+
├─ [`savePrompt.prompt.md`](../../../.vscode/extensions/github.copilot-chat-0.37.6/assets/prompts/savePrompt.prompt.md)<br>
28+
├─ [`plan.prompt.md`](../../../.vscode/extensions/github.copilot-chat-0.37.6/assets/prompts/plan.prompt.md)<br>
29+
└─ [`init.prompt.md`](../../../.vscode/extensions/github.copilot-chat-0.37.6/assets/prompts/init.prompt.md)<br>
30+
31+
**Skills**<br>
32+
33+
.github/skills<br>
34+
.agents/skills<br>
35+
.claude/skills<br>
36+
~/.copilot/skills<br>
37+
~/.agents/skills<br>
38+
~/.claude/skills<br>
39+
40+
**Hooks**<br>
41+
42+
*No files loaded*
43+

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
/css
1+
/css
22
/extensions
33
/installer_files
44
/repositories
@@ -31,3 +31,4 @@ wandb
3131
/docker-compose.yml
3232
/Dockerfile
3333
.env
34+
installer_files/conda/python.exe

css/html_instruct_style.css

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,14 @@
1919
color: #d1d5db !important;
2020
}
2121

22-
.chat .message-body :is(th, td) {
22+
.chat .message-body :is(th, td),
23+
.prose hr {
2324
border-color: #40404096 !important;
2425
}
2526

26-
.dark .chat .message-body :is(th, td) {
27-
border-color: #ffffff75 !important;
27+
.dark .chat .message-body :is(th, td),
28+
.dark .prose hr {
29+
border-color: rgb(255 255 255 / 30%) !important;
2830
}
2931

3032
.chat .message-body :is(p, ul, ol) {

css/main.css

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1797,3 +1797,20 @@ button#swap-height-width {
17971797
top: 0;
17981798
left: calc(100% - 174px);
17991799
}
1800+
1801+
table {
1802+
border-collapse: collapse;
1803+
}
1804+
1805+
table, tr, td, th, thead {
1806+
border: 0;
1807+
}
1808+
1809+
td + td,
1810+
th + th { border-left: 1px solid; }
1811+
1812+
tr + tr td,
1813+
tr + tr th { border-top: 1px solid; }
1814+
1815+
thead + tbody tr:first-child td,
1816+
thead + tbody tr:first-child th { border-top: 1px solid; }

download-model.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -435,3 +435,6 @@ def check_model_files(self, model, branch, links, sha256, output_folder):
435435
model, branch, links, sha256, output_folder,
436436
specific_file=specific_file, threads=args.threads, is_llamacpp=is_llamacpp
437437
)
438+
439+
440+

extensions/whisper_stt/requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
12
SpeechRecognition==3.10.0
23
openai-whisper
34
soundfile

extensions/whisper_stt/script.js

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,8 @@ window.startStopRecording = function() {
6262
}
6363
};
6464

65-
const recordButton = gradioApp().querySelector("#record-button");
65+
const
66+
recordButton = gradioApp().querySelector("#record-button");
6667
recordButton.addEventListener("click", window.startStopRecording);
6768

6869

@@ -84,3 +85,5 @@ recButton.innerHTML = "Rec.";
8485
recButton.addEventListener("click", function() {
8586
recordButton.click();
8687
});
88+
89+

modules/models_settings.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,11 @@ def get_model_metadata(model):
8383

8484
if 'tokenizer.chat_template' in metadata:
8585
template = metadata['tokenizer.chat_template']
86-
eos_token = metadata['tokenizer.ggml.tokens'][metadata['tokenizer.ggml.eos_token_id']]
86+
if 'tokenizer.ggml.eos_token_id' in metadata:
87+
eos_token = metadata['tokenizer.ggml.tokens'][metadata['tokenizer.ggml.eos_token_id']]
88+
else:
89+
eos_token = ""
90+
8791
if 'tokenizer.ggml.bos_token_id' in metadata:
8892
bos_token = metadata['tokenizer.ggml.tokens'][metadata['tokenizer.ggml.bos_token_id']]
8993
else:

one_click.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,7 @@ def cpu_has_avx2():
7777
def cpu_has_amx():
7878
try:
7979
import cpuinfo
80+
8081
info = cpuinfo.get_cpu_info()
8182
return 'amx' in info['flags']
8283
except:

requirements/full/requirements.txt

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
accelerate==1.8.*
22
audioop-lts<1.0; python_version >= "3.13"
3-
bitsandbytes==0.48.*
3+
bitsandbytes==0.49.*
44
colorama
55
datasets
66
diffusers==0.36.*
77
einops
88
fastapi==0.112.4
9-
flash-linear-attention==0.4.0
9+
flash-linear-attention==0.4.*
1010
html2text==2025.4.15
1111
huggingface-hub==0.36.0
1212
jinja2==3.1.6
@@ -26,7 +26,7 @@ safetensors==0.7.*
2626
scipy
2727
sentencepiece
2828
tensorboard
29-
torchao==0.14.*
29+
torchao==0.15.*
3030
transformers==4.57.*
3131
triton-windows==3.5.1.post22; platform_system == "Windows"
3232
tqdm
@@ -42,8 +42,8 @@ sse-starlette==1.6.5
4242
tiktoken
4343

4444
# CUDA wheels
45-
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.71.0/llama_cpp_binaries-0.71.0+cu124-py3-none-win_amd64.whl; platform_system == "Windows"
46-
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.71.0/llama_cpp_binaries-0.71.0+cu124-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
45+
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.74.0/llama_cpp_binaries-0.74.0+cu124-py3-none-win_amd64.whl; platform_system == "Windows"
46+
https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.74.0/llama_cpp_binaries-0.74.0+cu124-py3-none-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
4747
https://github.com/turboderp-org/exllamav3/releases/download/v0.0.18/exllamav3-0.0.18+cu128.torch2.7.0-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
4848
https://github.com/turboderp-org/exllamav3/releases/download/v0.0.18/exllamav3-0.0.18+cu128.torch2.7.0-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
4949
https://github.com/turboderp-org/exllamav2/releases/download/v0.3.2/exllamav2-0.3.2+cu128.torch2.7.0-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"

0 commit comments

Comments (0)