From faed129586a0b58e3ba27770060e9ce269c4b81f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julian=20M=C3=BCller=20=28ChaoticByte=29?=
Date: Tue, 30 May 2023 19:07:23 +0200
Subject: [PATCH] Added a profile for Vicuna v1.1

---
 README.md                 | 5 +++--
 profiles/vicuna-v1.1.json | 8 ++++++++
 2 files changed, 11 insertions(+), 2 deletions(-)
 create mode 100644 profiles/vicuna-v1.1.json

diff --git a/README.md b/README.md
index b74ef18..e996778 100644
--- a/README.md
+++ b/README.md
@@ -15,9 +15,10 @@ For memory and disk requirements for the different models, see [llama.cpp - Memo
 ## Supported Models
 
 - [🐨 Koala](https://bair.berkeley.edu/blog/2023/04/03/koala/)
-- [🦙 Vicuna v0](https://lmsys.org/blog/2023-03-30-vicuna/)
+- [🦙 Vicuna v.0](https://lmsys.org/blog/2023-03-30-vicuna/)
+- [🦙 Vicuna v.1.1](https://lmsys.org/blog/2023-03-30-vicuna/)
 
-see `./profiles/`
+(see `./profiles/`)
 
 ## Usage
 
diff --git a/profiles/vicuna-v1.1.json b/profiles/vicuna-v1.1.json
new file mode 100644
index 0000000..73829e3
--- /dev/null
+++ b/profiles/vicuna-v1.1.json
@@ -0,0 +1,8 @@
+{
+    "name": "Vicuna",
+    "conversation_prefix": "A chat between a curious user and a helpful AI assistant.\n\n",
+    "user_keyword": "USER:",
+    "assistant_keyword": "ASSISTANT:",
+    "separator": "\n",
+    "stop_sequences": ["</s>"]
+}
\ No newline at end of file
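
As a rough illustration of how the new profile's fields fit together, the sketch below assembles a Vicuna v1.1-style prompt from profiles/vicuna-v1.1.json. The build_prompt helper and the exact way the fields are concatenated are assumptions inferred from the key names in the JSON above, not taken from this project's actual loading code.

# Minimal sketch (assumed usage, not the project's implementation): build a
# Vicuna v1.1-style prompt from the profile fields added in this patch.
import json

def build_prompt(profile_path: str, history: list[tuple[str, str]], user_message: str) -> str:
    """Assemble a prompt string from a profile and a (user, assistant) history."""
    with open(profile_path, encoding="utf-8") as f:
        profile = json.load(f)

    parts = [profile["conversation_prefix"]]
    for user_turn, assistant_turn in history:
        parts.append(f'{profile["user_keyword"]} {user_turn}{profile["separator"]}')
        parts.append(f'{profile["assistant_keyword"]} {assistant_turn}{profile["separator"]}')
    # End with the assistant keyword so the model continues as the assistant.
    parts.append(f'{profile["user_keyword"]} {user_message}{profile["separator"]}')
    parts.append(profile["assistant_keyword"])
    return "".join(parts)

if __name__ == "__main__":
    prompt = build_prompt(
        "profiles/vicuna-v1.1.json",
        history=[("Hello!", "Hi, how can I help you today?")],
        user_message="Name three llamas.",
    )
    print(prompt)
    # Generation would then be cut off at any entry of profile["stop_sequences"],
    # e.g. "</s>", which is Vicuna v1.1's end-of-sequence token.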