Upload tokenizer
- README.md +1 -1
- tokenizer_config.json +2 -0
README.md
CHANGED
@@ -1,8 +1,8 @@
 ---
 language:
 - en
-license: mit
 library_name: transformers
+license: mit
 tags:
 - unsloth
 - phi3
tokenizer_config.json
CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": false,
   "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -119,6 +120,7 @@
   "chat_template": "{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "legacy": false,
   "model_max_length": 4096,
   "pad_token": "<|placeholder6|>",
   "padding_side": "left",
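For context, a minimal sketch of how the fields touched by this commit are consumed once the tokenizer is loaded with transformers. The repo id below is a placeholder (the actual repository name is not shown in this diff), and the printed prompt follows directly from the chat_template string above:

```python
from transformers import AutoTokenizer

# Placeholder repo id; substitute the repository this commit belongs to.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-phi3-model")

# "legacy": false opts into the corrected SentencePiece handling in recent
# transformers releases; "add_prefix_space": null leaves prefix-space
# behavior at the tokenizer backend's default.
messages = [
    {"role": "user", "content": "What is the capital of France?"},
    {"role": "assistant", "content": "Paris."},
]

# The chat_template wraps each user turn in <|user|> ... <|end|> and appends
# <|assistant|> to cue the model; assistant turns close with <|end|>.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# <|user|>
# What is the capital of France?
# <|end|>
# <|assistant|>
# Paris.
# <|end|>
```

Note also that `pad_token` is `<|placeholder6|>` (presumably one of Phi-3's reserved, otherwise-unused special tokens) with `padding_side` set to `left`, the usual arrangement for batched generation with decoder-only models: left padding keeps the real tokens adjacent to the position where generation continues.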