talk-llama : increase context to 2048
examples/talk-llama/talk-llama.cpp CHANGED
@@ -239,7 +239,7 @@ int main(int argc, char ** argv) {
     auto lparams = llama_context_default_params();
 
     // tune these to your liking
-    lparams.n_ctx      =
+    lparams.n_ctx      = 2048;
     lparams.seed       = 1;
     lparams.f16_kv     = true;
     lparams.n_parts    = params.n_parts_llama;
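
For context, a minimal sketch of how tuned llama_context_params like these are typically consumed in the llama.cpp API of this era. This is not the full talk-llama example: the model path is a placeholder, the n_parts field (which the example takes from its own CLI params) is omitted, and llama_init_from_file is assumed to be the context constructor available at this point in llama.cpp's history.

// sketch: create a LLaMA context with a 2048-token window (assumed API of this era)
#include "llama.h"

#include <cstdio>

int main() {
    auto lparams = llama_context_default_params();

    // same tuning as the patched example: a larger 2048-token context window
    lparams.n_ctx  = 2048;
    lparams.seed   = 1;
    lparams.f16_kv = true; // keep the KV cache in fp16 to save memory

    // placeholder model path; llama_init_from_file is an assumption about the
    // llama.cpp version bundled with this example (it was later replaced)
    llama_context * ctx_llama = llama_init_from_file("models/ggml-model-q4_0.bin", lparams);
    if (ctx_llama == nullptr) {
        fprintf(stderr, "failed to load LLaMA model\n");
        return 1;
    }

    // ... run inference with up to 2048 tokens of context ...

    llama_free(ctx_llama);
    return 0;
}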