pszemraj commited on
Commit
fec01d1
·
verified ·
0 Parent(s):

Super-squash branch 'main' using huggingface_hub

Browse files
.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,273 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ datasets:
4
+ - BEE-spoke-data/pypi_clean-deduped
5
+ source_model: BEE-spoke-data/smol_llama-101M-GQA
6
+ language:
7
+ - en
8
+ tags:
9
+ - python
10
+ - codegen
11
+ - markdown
12
+ - smol_llama
13
+ metrics:
14
+ - accuracy
15
+ inference:
16
+ parameters:
17
+ max_new_tokens: 48
18
+ min_new_tokens: 8
19
+ num_beams: 3
20
+ early_stopping: true
21
+ repetition_penalty: 1.1
22
+ no_repeat_ngram_size: 6
23
+ renormalize_logits: true
24
+ widget:
25
+ - text: |
26
+ def add_numbers(a, b):
27
+ return
28
+ example_title: Add Numbers Function
29
+ - text: |
30
+ class Car:
31
+ def __init__(self, make, model):
32
+ self.make = make
33
+ self.model = model
34
+
35
+ def display_car(self):
36
+ example_title: Car Class
37
+ - text: |
38
+ import pandas as pd
39
+ data = {'Name': ['Tom', 'Nick', 'John'], 'Age': [20, 21, 19]}
40
+ df = pd.DataFrame(data).convert_dtypes()
41
+ # eda
42
+
43
+ example_title: Pandas DataFrame
44
+ - text: |
45
+ def factorial(n):
46
+ if n == 0:
47
+ return 1
48
+ else:
49
+ example_title: Factorial Function
50
+ - text: |
51
+ def fibonacci(n):
52
+ if n <= 0:
53
+ raise ValueError("Incorrect input")
54
+ elif n == 1:
55
+ return 0
56
+ elif n == 2:
57
+ return 1
58
+ else:
59
+ example_title: Fibonacci Function
60
+ - text: |
61
+ import matplotlib.pyplot as plt
62
+ import numpy as np
63
+ x = np.linspace(0, 10, 100)
64
+ # simple plot
65
+
66
+ example_title: Matplotlib Plot
67
+ - text: |
68
+ def reverse_string(s:str) -> str:
69
+ return
70
+ example_title: Reverse String Function
71
+ - text: |
72
+ def is_palindrome(word:str) -> bool:
73
+ return
74
+ example_title: Palindrome Function
75
+ - text: |
76
+ def bubble_sort(lst: list):
77
+ n = len(lst)
78
+ for i in range(n):
79
+ for j in range(0, n-i-1):
80
+ example_title: Bubble Sort Function
81
+ - text: |
82
+ def binary_search(arr, low, high, x):
83
+ if high >= low:
84
+ mid = (high + low) // 2
85
+ if arr[mid] == x:
86
+ return mid
87
+ elif arr[mid] > x:
88
+ example_title: Binary Search Function
89
+ ---
90
+
91
+ # smol_llama-101M-GQA: python
92
+
93
+ <a href="https://colab.research.google.com/gist/pszemraj/91b5a267df95461b46922e6c0212e8f7/beecoder-basic-test-notebook.ipynb">
94
+ <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/>
95
+ </a>
96
+
97
+ > 400MB of buzz: pure Python programming nectar! 🍯
98
+
99
+ This model is the general pre-trained checkpoint `BEE-spoke-data/smol_llama-101M-GQA` trained on a deduped version of `pypi` for +1 epoch. Play with the model in [this demo space](https://huggingface.co/spaces/BEE-spoke-data/beecoder-playground).
100
+
101
+ - Its architecture is the same as the base, with some new Python-related tokens added to vocab prior to training.
102
+ - It can generate basic Python code and markdown in README style, but will struggle with harder planning/reasoning tasks
103
+ - This is an experiment to test the abilities of smol-sized models in code generation; meaning **both** its capabilities and limitations
104
+
105
+ Use with care & understand that there may be some bugs 🐛 still to be worked out.
106
+
107
+ ## Usage
108
+
109
+ 📌 Be sure to note:
110
+
111
+ 1. The model uses the "slow" llama2 tokenizer. Set use_fast=False when loading the tokenizer.
112
+ 2. Use transformers library version 4.33.3, due to a known issue in version 4.34.1 (_at time of writing_).
113
+
114
+ > Which llama2 tokenizer the API widget uses is an age-old mystery, and may cause minor whitespace issues (widget only).
115
+
116
+ To install the necessary packages and load the model:
117
+
118
+ ```python
119
+ # Install necessary packages
120
+ # pip install transformers==4.33.3 accelerate sentencepiece
121
+
122
+ from transformers import AutoTokenizer, AutoModelForCausalLM
123
+
124
+ # Load the tokenizer and model
125
+ tokenizer = AutoTokenizer.from_pretrained(
126
+ "BEE-spoke-data/smol_llama-101M-GQA-python",
127
+ use_fast=False,
128
+ )
129
+ model = AutoModelForCausalLM.from_pretrained(
130
+ "BEE-spoke-data/smol_llama-101M-GQA-python",
131
+ device_map="auto",
132
+ )
133
+
134
+ # The model can now be used as any other decoder
135
+ ```
136
+
137
+ ### longer code-gen example
138
+
139
+
140
+ Below is a quick script that can be used as a reference/starting point for writing your own, better one :)
141
+
142
+
143
+
144
+ <details>
145
+ <summary>🔥 Unleash the Power of Code Generation! Click to Reveal the Magic! 🔮</summary>
146
+
147
+ Are you ready to witness the incredible possibilities of code generation? 🚀 Brace yourself for an exceptional journey into the world of artificial intelligence and programming. Observe a script that will change the way you create and finalize code.
148
+
149
+ This script opens the door to a world where machines can write code with remarkable precision and imagination.
150
+
151
+ ```python
152
+ """
153
+ simple script for testing model(s) designed to generate/complete code
154
+
155
+ See details/args with the below.
156
+ python textgen_inference_code.py --help
157
+ """
158
+ import logging
159
+ import random
160
+ import time
161
+ from pathlib import Path
162
+
163
+ import fire
164
+ import torch
165
+ from transformers import AutoModelForCausalLM, AutoTokenizer
166
+
167
+ logging.basicConfig(format="%(levelname)s - %(message)s", level=logging.INFO)
168
+
169
+
170
+ class Timer:
171
+ """
172
+ Basic timer utility.
173
+ """
174
+
175
+ def __enter__(self):
176
+
177
+ self.start_time = time.perf_counter()
178
+ return self
179
+
180
+ def __exit__(self, exc_type, exc_value, traceback):
181
+
182
+ self.end_time = time.perf_counter()
183
+ self.elapsed_time = self.end_time - self.start_time
184
+ logging.info(f"Elapsed time: {self.elapsed_time:.4f} seconds")
185
+
186
+
187
+ def load_model(model_name, use_fast=False):
188
+ """ util for loading model and tokenizer"""
189
+ logging.info(f"Loading model: {model_name}")
190
+ tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=use_fast)
191
+ model = AutoModelForCausalLM.from_pretrained(
192
+ model_name, torch_dtype="auto", device_map="auto"
193
+ )
194
+ model = torch.compile(model)
195
+ return tokenizer, model
196
+
197
+
198
+ def run_inference(prompt, model, tokenizer, max_new_tokens: int = 256):
199
+ """
200
+ run_inference
201
+
202
+ Args:
203
+ prompt (TYPE): Description
204
+ model (TYPE): Description
205
+ tokenizer (TYPE): Description
206
+ max_new_tokens (int, optional): Description
207
+
208
+ Returns:
209
+ TYPE: Description
210
+ """
211
+ logging.info(f"Running inference with max_new_tokens={max_new_tokens} ...")
212
+ with Timer() as timer:
213
+ inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
214
+ outputs = model.generate(
215
+ **inputs,
216
+ max_new_tokens=max_new_tokens,
217
+ min_new_tokens=8,
218
+ renormalize_logits=True,
219
+ no_repeat_ngram_size=8,
220
+ repetition_penalty=1.04,
221
+ num_beams=4,
222
+ early_stopping=True,
223
+ )
224
+ text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
225
+ logging.info(f"Output text:\n\n{text}")
226
+ return text
227
+
228
+
229
+ def main(
230
+ model_name="BEE-spoke-data/smol_llama-101M-GQA-python",
231
+ prompt:str=None,
232
+ use_fast=False,
233
+ n_tokens: int = 256,
234
+ ):
235
+ """Summary
236
+
237
+ Args:
238
+ model_name (str, optional): Description
239
+ prompt (None, optional): specify the prompt directly (default: random choice from list)
240
+ n_tokens (int, optional): max new tokens to generate
241
+ """
242
+ logging.info(f"Inference with:\t{model_name}, max_new_tokens:{n_tokens}")
243
+
244
+ if prompt is None:
245
+ prompt_list = [
246
+ '''
247
+ def print_primes(n: int):
248
+ """
249
+ Print all primes between 1 and n
250
+ """''',
251
+ "def quantum_analysis(",
252
+ "def sanitize_filenames(target_dir:str, recursive:False, extension",
253
+ ]
254
+ prompt = random.SystemRandom().choice(prompt_list)
255
+
256
+ logging.info(f"Using prompt:\t{prompt}")
257
+
258
+ tokenizer, model = load_model(model_name, use_fast=use_fast)
259
+
260
+ run_inference(prompt, model, tokenizer, n_tokens)
261
+
262
+
263
+ if __name__ == "__main__":
264
+ fire.Fire(main)
265
+ ```
266
+
267
+ Wowoweewa!! It can create some file cleaning utilities.
268
+
269
+
270
+ </details>
271
+
272
+
273
+ ---
added_tokens.json ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "**kwargs": 32000,
3
+ "*args": 32026,
4
+ "<0x0A><0x0A>": 32070,
5
+ "<0x0A><0x0A><0x0A>": 32071,
6
+ ">>> ": 32036,
7
+ "@abstractmethod": 32001,
8
+ "@classmethod": 32002,
9
+ "@property": 32003,
10
+ "@staticmethod": 32004,
11
+ "AttributeError": 32038,
12
+ "Callable": 32039,
13
+ "DELETE": 32030,
14
+ "GraphQL": 32027,
15
+ "ImportError": 32040,
16
+ "JOIN": 32031,
17
+ "JWT": 32028,
18
+ "KeyError": 32041,
19
+ "NotImplementedError": 32042,
20
+ "Notes": 32035,
21
+ "OAuth": 32029,
22
+ "README": 32043,
23
+ "Raises": 32037,
24
+ "RuntimeError": 32044,
25
+ "TODO": 32034,
26
+ "Tuple": 32045,
27
+ "TypeError": 32046,
28
+ "ValueError": 32047,
29
+ "__all__": 32005,
30
+ "__call__": 32006,
31
+ "__class__": 32007,
32
+ "__contains__": 32008,
33
+ "__del__": 32010,
34
+ "__delitem__": 32009,
35
+ "__dict__": 32011,
36
+ "__doc__": 32012,
37
+ "__file__": 32013,
38
+ "__future__": 32014,
39
+ "__getitem__": 32015,
40
+ "__init__": 32016,
41
+ "__len__": 32017,
42
+ "__main__": 32018,
43
+ "__name__": 32019,
44
+ "__package__": 32020,
45
+ "__props__": 32021,
46
+ "__repr__": 32022,
47
+ "__self__": 32023,
48
+ "__setitem__": 32024,
49
+ "__str__": 32025,
50
+ "asyncio": 32048,
51
+ "cuda": 32051,
52
+ "dtype": 32049,
53
+ "finally": 32050,
54
+ "functools": 32052,
55
+ "gRPC": 32032,
56
+ "getattr": 32053,
57
+ "getter": 32054,
58
+ "globals()": 32068,
59
+ "hasattr": 32055,
60
+ "itertools": 32056,
61
+ "jsii": 32057,
62
+ "locals()": 32067,
63
+ "pathlib": 32058,
64
+ "pickle": 32061,
65
+ "plt": 32059,
66
+ "pulumi": 32060,
67
+ "rtype": 32033,
68
+ "setter": 32062,
69
+ "shutil": 32063,
70
+ "sklearn": 32064,
71
+ "stderr": 32065,
72
+ "torch": 32066,
73
+ "yield": 32069,
74
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32072,
75
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32073,
76
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32074,
77
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32075,
78
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32076,
79
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32077,
80
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32078,
81
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32079,
82
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32080,
83
+ "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁": 32081
84
+ }
beecoder_basic_test_notebook.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
config.json ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "pszemraj/smol_llama-101M-GQA-python",
3
+ "architectures": [
4
+ "LlamaForCausalLM"
5
+ ],
6
+ "attention_bias": false,
7
+ "bos_token_id": 1,
8
+ "eos_token_id": 2,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 768,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 3072,
13
+ "max_position_embeddings": 1024,
14
+ "model_type": "llama",
15
+ "num_attention_heads": 24,
16
+ "num_hidden_layers": 6,
17
+ "num_key_value_heads": 8,
18
+ "pretraining_tp": 1,
19
+ "rms_norm_eps": 1e-05,
20
+ "rope_scaling": null,
21
+ "rope_theta": 10000.0,
22
+ "tie_word_embeddings": false,
23
+ "torch_dtype": "float32",
24
+ "transformers_version": "4.33.3",
25
+ "use_cache": true,
26
+ "vocab_size": 32128
27
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
+ "transformers_version": "4.33.3"
6
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e2c5e483eb866eef74eeebbd62608fa5843d6ab17b45be4883aa6b8ff8fc6abe
3
+ size 405058720
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "unk_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ }
23
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
3
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,719 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "added_tokens_decoder": {
5
+ "0": {
6
+ "content": "<unk>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "1": {
14
+ "content": "<s>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "2": {
22
+ "content": "</s>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "32000": {
30
+ "content": "**kwargs",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": true,
35
+ "special": false
36
+ },
37
+ "32001": {
38
+ "content": "@abstractmethod",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": true,
43
+ "special": false
44
+ },
45
+ "32002": {
46
+ "content": "@classmethod",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": true,
51
+ "special": false
52
+ },
53
+ "32003": {
54
+ "content": "@property",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": true,
59
+ "special": false
60
+ },
61
+ "32004": {
62
+ "content": "@staticmethod",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": true,
67
+ "special": false
68
+ },
69
+ "32005": {
70
+ "content": "__all__",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": true,
75
+ "special": false
76
+ },
77
+ "32006": {
78
+ "content": "__call__",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": true,
83
+ "special": false
84
+ },
85
+ "32007": {
86
+ "content": "__class__",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": true,
91
+ "special": false
92
+ },
93
+ "32008": {
94
+ "content": "__contains__",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": true,
99
+ "special": false
100
+ },
101
+ "32009": {
102
+ "content": "__delitem__",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": true,
107
+ "special": false
108
+ },
109
+ "32010": {
110
+ "content": "__del__",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": true,
115
+ "special": false
116
+ },
117
+ "32011": {
118
+ "content": "__dict__",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": true,
123
+ "special": false
124
+ },
125
+ "32012": {
126
+ "content": "__doc__",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": true,
131
+ "special": false
132
+ },
133
+ "32013": {
134
+ "content": "__file__",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": true,
139
+ "special": false
140
+ },
141
+ "32014": {
142
+ "content": "__future__",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": true,
147
+ "special": false
148
+ },
149
+ "32015": {
150
+ "content": "__getitem__",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": true,
155
+ "special": false
156
+ },
157
+ "32016": {
158
+ "content": "__init__",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": true,
163
+ "special": false
164
+ },
165
+ "32017": {
166
+ "content": "__len__",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": true,
171
+ "special": false
172
+ },
173
+ "32018": {
174
+ "content": "__main__",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": true,
179
+ "special": false
180
+ },
181
+ "32019": {
182
+ "content": "__name__",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": true,
187
+ "special": false
188
+ },
189
+ "32020": {
190
+ "content": "__package__",
191
+ "lstrip": false,
192
+ "normalized": false,
193
+ "rstrip": false,
194
+ "single_word": true,
195
+ "special": false
196
+ },
197
+ "32021": {
198
+ "content": "__props__",
199
+ "lstrip": false,
200
+ "normalized": false,
201
+ "rstrip": false,
202
+ "single_word": true,
203
+ "special": false
204
+ },
205
+ "32022": {
206
+ "content": "__repr__",
207
+ "lstrip": false,
208
+ "normalized": false,
209
+ "rstrip": false,
210
+ "single_word": true,
211
+ "special": false
212
+ },
213
+ "32023": {
214
+ "content": "__self__",
215
+ "lstrip": false,
216
+ "normalized": false,
217
+ "rstrip": false,
218
+ "single_word": true,
219
+ "special": false
220
+ },
221
+ "32024": {
222
+ "content": "__setitem__",
223
+ "lstrip": false,
224
+ "normalized": false,
225
+ "rstrip": false,
226
+ "single_word": true,
227
+ "special": false
228
+ },
229
+ "32025": {
230
+ "content": "__str__",
231
+ "lstrip": false,
232
+ "normalized": false,
233
+ "rstrip": false,
234
+ "single_word": true,
235
+ "special": false
236
+ },
237
+ "32026": {
238
+ "content": "*args",
239
+ "lstrip": false,
240
+ "normalized": false,
241
+ "rstrip": false,
242
+ "single_word": true,
243
+ "special": false
244
+ },
245
+ "32027": {
246
+ "content": "GraphQL",
247
+ "lstrip": false,
248
+ "normalized": false,
249
+ "rstrip": false,
250
+ "single_word": false,
251
+ "special": false
252
+ },
253
+ "32028": {
254
+ "content": "JWT",
255
+ "lstrip": false,
256
+ "normalized": false,
257
+ "rstrip": false,
258
+ "single_word": false,
259
+ "special": false
260
+ },
261
+ "32029": {
262
+ "content": "OAuth",
263
+ "lstrip": false,
264
+ "normalized": false,
265
+ "rstrip": false,
266
+ "single_word": false,
267
+ "special": false
268
+ },
269
+ "32030": {
270
+ "content": "DELETE",
271
+ "lstrip": false,
272
+ "normalized": false,
273
+ "rstrip": false,
274
+ "single_word": false,
275
+ "special": false
276
+ },
277
+ "32031": {
278
+ "content": "JOIN",
279
+ "lstrip": false,
280
+ "normalized": false,
281
+ "rstrip": false,
282
+ "single_word": false,
283
+ "special": false
284
+ },
285
+ "32032": {
286
+ "content": "gRPC",
287
+ "lstrip": false,
288
+ "normalized": false,
289
+ "rstrip": false,
290
+ "single_word": false,
291
+ "special": false
292
+ },
293
+ "32033": {
294
+ "content": "rtype",
295
+ "lstrip": true,
296
+ "normalized": true,
297
+ "rstrip": false,
298
+ "single_word": false,
299
+ "special": false
300
+ },
301
+ "32034": {
302
+ "content": "TODO",
303
+ "lstrip": true,
304
+ "normalized": true,
305
+ "rstrip": false,
306
+ "single_word": false,
307
+ "special": false
308
+ },
309
+ "32035": {
310
+ "content": "Notes",
311
+ "lstrip": true,
312
+ "normalized": true,
313
+ "rstrip": false,
314
+ "single_word": false,
315
+ "special": false
316
+ },
317
+ "32036": {
318
+ "content": ">>> ",
319
+ "lstrip": true,
320
+ "normalized": true,
321
+ "rstrip": false,
322
+ "single_word": false,
323
+ "special": false
324
+ },
325
+ "32037": {
326
+ "content": "Raises",
327
+ "lstrip": true,
328
+ "normalized": true,
329
+ "rstrip": false,
330
+ "single_word": false,
331
+ "special": false
332
+ },
333
+ "32038": {
334
+ "content": "AttributeError",
335
+ "lstrip": false,
336
+ "normalized": false,
337
+ "rstrip": false,
338
+ "single_word": false,
339
+ "special": false
340
+ },
341
+ "32039": {
342
+ "content": "Callable",
343
+ "lstrip": false,
344
+ "normalized": false,
345
+ "rstrip": false,
346
+ "single_word": false,
347
+ "special": false
348
+ },
349
+ "32040": {
350
+ "content": "ImportError",
351
+ "lstrip": false,
352
+ "normalized": false,
353
+ "rstrip": false,
354
+ "single_word": false,
355
+ "special": false
356
+ },
357
+ "32041": {
358
+ "content": "KeyError",
359
+ "lstrip": false,
360
+ "normalized": false,
361
+ "rstrip": false,
362
+ "single_word": false,
363
+ "special": false
364
+ },
365
+ "32042": {
366
+ "content": "NotImplementedError",
367
+ "lstrip": false,
368
+ "normalized": false,
369
+ "rstrip": false,
370
+ "single_word": false,
371
+ "special": false
372
+ },
373
+ "32043": {
374
+ "content": "README",
375
+ "lstrip": false,
376
+ "normalized": false,
377
+ "rstrip": false,
378
+ "single_word": false,
379
+ "special": false
380
+ },
381
+ "32044": {
382
+ "content": "RuntimeError",
383
+ "lstrip": false,
384
+ "normalized": false,
385
+ "rstrip": false,
386
+ "single_word": false,
387
+ "special": false
388
+ },
389
+ "32045": {
390
+ "content": "Tuple",
391
+ "lstrip": false,
392
+ "normalized": false,
393
+ "rstrip": false,
394
+ "single_word": false,
395
+ "special": false
396
+ },
397
+ "32046": {
398
+ "content": "TypeError",
399
+ "lstrip": false,
400
+ "normalized": false,
401
+ "rstrip": false,
402
+ "single_word": false,
403
+ "special": false
404
+ },
405
+ "32047": {
406
+ "content": "ValueError",
407
+ "lstrip": false,
408
+ "normalized": false,
409
+ "rstrip": false,
410
+ "single_word": false,
411
+ "special": false
412
+ },
413
+ "32048": {
414
+ "content": "asyncio",
415
+ "lstrip": false,
416
+ "normalized": false,
417
+ "rstrip": false,
418
+ "single_word": false,
419
+ "special": false
420
+ },
421
+ "32049": {
422
+ "content": "dtype",
423
+ "lstrip": false,
424
+ "normalized": false,
425
+ "rstrip": false,
426
+ "single_word": false,
427
+ "special": false
428
+ },
429
+ "32050": {
430
+ "content": "finally",
431
+ "lstrip": false,
432
+ "normalized": false,
433
+ "rstrip": false,
434
+ "single_word": false,
435
+ "special": false
436
+ },
437
+ "32051": {
438
+ "content": "cuda",
439
+ "lstrip": false,
440
+ "normalized": false,
441
+ "rstrip": false,
442
+ "single_word": false,
443
+ "special": false
444
+ },
445
+ "32052": {
446
+ "content": "functools",
447
+ "lstrip": false,
448
+ "normalized": false,
449
+ "rstrip": false,
450
+ "single_word": false,
451
+ "special": false
452
+ },
453
+ "32053": {
454
+ "content": "getattr",
455
+ "lstrip": false,
456
+ "normalized": false,
457
+ "rstrip": false,
458
+ "single_word": false,
459
+ "special": false
460
+ },
461
+ "32054": {
462
+ "content": "getter",
463
+ "lstrip": false,
464
+ "normalized": false,
465
+ "rstrip": false,
466
+ "single_word": false,
467
+ "special": false
468
+ },
469
+ "32055": {
470
+ "content": "hasattr",
471
+ "lstrip": false,
472
+ "normalized": false,
473
+ "rstrip": false,
474
+ "single_word": false,
475
+ "special": false
476
+ },
477
+ "32056": {
478
+ "content": "itertools",
479
+ "lstrip": false,
480
+ "normalized": false,
481
+ "rstrip": false,
482
+ "single_word": false,
483
+ "special": false
484
+ },
485
+ "32057": {
486
+ "content": "jsii",
487
+ "lstrip": false,
488
+ "normalized": false,
489
+ "rstrip": false,
490
+ "single_word": false,
491
+ "special": false
492
+ },
493
+ "32058": {
494
+ "content": "pathlib",
495
+ "lstrip": false,
496
+ "normalized": false,
497
+ "rstrip": false,
498
+ "single_word": false,
499
+ "special": false
500
+ },
501
+ "32059": {
502
+ "content": "plt",
503
+ "lstrip": false,
504
+ "normalized": false,
505
+ "rstrip": false,
506
+ "single_word": false,
507
+ "special": false
508
+ },
509
+ "32060": {
510
+ "content": "pulumi",
511
+ "lstrip": false,
512
+ "normalized": false,
513
+ "rstrip": false,
514
+ "single_word": false,
515
+ "special": false
516
+ },
517
+ "32061": {
518
+ "content": "pickle",
519
+ "lstrip": false,
520
+ "normalized": false,
521
+ "rstrip": false,
522
+ "single_word": false,
523
+ "special": false
524
+ },
525
+ "32062": {
526
+ "content": "setter",
527
+ "lstrip": false,
528
+ "normalized": false,
529
+ "rstrip": false,
530
+ "single_word": false,
531
+ "special": false
532
+ },
533
+ "32063": {
534
+ "content": "shutil",
535
+ "lstrip": false,
536
+ "normalized": false,
537
+ "rstrip": false,
538
+ "single_word": false,
539
+ "special": false
540
+ },
541
+ "32064": {
542
+ "content": "sklearn",
543
+ "lstrip": false,
544
+ "normalized": false,
545
+ "rstrip": false,
546
+ "single_word": false,
547
+ "special": false
548
+ },
549
+ "32065": {
550
+ "content": "stderr",
551
+ "lstrip": false,
552
+ "normalized": false,
553
+ "rstrip": false,
554
+ "single_word": false,
555
+ "special": false
556
+ },
557
+ "32066": {
558
+ "content": "torch",
559
+ "lstrip": false,
560
+ "normalized": false,
561
+ "rstrip": false,
562
+ "single_word": false,
563
+ "special": false
564
+ },
565
+ "32067": {
566
+ "content": "locals()",
567
+ "lstrip": false,
568
+ "normalized": false,
569
+ "rstrip": false,
570
+ "single_word": false,
571
+ "special": false
572
+ },
573
+ "32068": {
574
+ "content": "globals()",
575
+ "lstrip": false,
576
+ "normalized": false,
577
+ "rstrip": false,
578
+ "single_word": false,
579
+ "special": false
580
+ },
581
+ "32069": {
582
+ "content": "yield",
583
+ "lstrip": false,
584
+ "normalized": false,
585
+ "rstrip": false,
586
+ "single_word": false,
587
+ "special": false
588
+ },
589
+ "32070": {
590
+ "content": "<0x0A><0x0A>",
591
+ "lstrip": false,
592
+ "normalized": false,
593
+ "rstrip": false,
594
+ "single_word": false,
595
+ "special": false
596
+ },
597
+ "32071": {
598
+ "content": "<0x0A><0x0A><0x0A>",
599
+ "lstrip": false,
600
+ "normalized": false,
601
+ "rstrip": false,
602
+ "single_word": false,
603
+ "special": false
604
+ },
605
+ "32072": {
606
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
607
+ "lstrip": false,
608
+ "normalized": false,
609
+ "rstrip": false,
610
+ "single_word": false,
611
+ "special": false
612
+ },
613
+ "32073": {
614
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
615
+ "lstrip": false,
616
+ "normalized": false,
617
+ "rstrip": false,
618
+ "single_word": false,
619
+ "special": false
620
+ },
621
+ "32074": {
622
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
623
+ "lstrip": false,
624
+ "normalized": false,
625
+ "rstrip": false,
626
+ "single_word": false,
627
+ "special": false
628
+ },
629
+ "32075": {
630
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
631
+ "lstrip": false,
632
+ "normalized": false,
633
+ "rstrip": false,
634
+ "single_word": false,
635
+ "special": false
636
+ },
637
+ "32076": {
638
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
639
+ "lstrip": false,
640
+ "normalized": false,
641
+ "rstrip": false,
642
+ "single_word": false,
643
+ "special": false
644
+ },
645
+ "32077": {
646
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
647
+ "lstrip": false,
648
+ "normalized": false,
649
+ "rstrip": false,
650
+ "single_word": false,
651
+ "special": false
652
+ },
653
+ "32078": {
654
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
655
+ "lstrip": false,
656
+ "normalized": false,
657
+ "rstrip": false,
658
+ "single_word": false,
659
+ "special": false
660
+ },
661
+ "32079": {
662
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
663
+ "lstrip": false,
664
+ "normalized": false,
665
+ "rstrip": false,
666
+ "single_word": false,
667
+ "special": false
668
+ },
669
+ "32080": {
670
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
671
+ "lstrip": false,
672
+ "normalized": false,
673
+ "rstrip": false,
674
+ "single_word": false,
675
+ "special": false
676
+ },
677
+ "32081": {
678
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
679
+ "lstrip": false,
680
+ "normalized": false,
681
+ "rstrip": false,
682
+ "single_word": false,
683
+ "special": false
684
+ }
685
+ },
686
+ "bos_token": {
687
+ "__type": "AddedToken",
688
+ "content": "<s>",
689
+ "lstrip": false,
690
+ "normalized": true,
691
+ "rstrip": false,
692
+ "single_word": false
693
+ },
694
+ "clean_up_tokenization_spaces": false,
695
+ "eos_token": {
696
+ "__type": "AddedToken",
697
+ "content": "</s>",
698
+ "lstrip": false,
699
+ "normalized": true,
700
+ "rstrip": false,
701
+ "single_word": false
702
+ },
703
+ "legacy": false,
704
+ "model_max_length": 1000000000000000019884624838656,
705
+ "pad_token": null,
706
+ "padding_side": "right",
707
+ "sp_model_kwargs": {},
708
+ "spaces_between_special_tokens": false,
709
+ "tokenizer_class": "LlamaTokenizer",
710
+ "unk_token": {
711
+ "__type": "AddedToken",
712
+ "content": "<unk>",
713
+ "lstrip": false,
714
+ "normalized": true,
715
+ "rstrip": false,
716
+ "single_word": false
717
+ },
718
+ "use_default_system_prompt": true
719
+ }