瀏覽代碼

adding llama options

Hamid Shojanazeri 1 年之前
父節點
當前提交
a02c243c52
共有 1 個文件被更改,包括 3 次插入和 3 次刪除
  1. 3 3
      tutorials/chatbot/data_pipelines/generate_question_answers.py

+ 3 - 3
tutorials/chatbot/data_pipelines/generate_question_answers.py

@@ -43,8 +43,8 @@ def parse_arguments(context):
     )
     parser.add_argument(
         "-m", "--model",
-        choices=["gpt-3.5-turbo-16k", "gpt-3.5-turbo-0125"],
-        default="gpt-3.5-turbo-16k",
+        choices=["llama-2-70b-chat-fp16", "llama-2-13b-chat-fp16"],
+        default="llama-2-70b-chat-fp16",
         help="Select the model to use for generation."
     )
     return parser.parse_args()
@@ -58,4 +58,4 @@ if __name__ == "__main__":
     context["model"] = args.model
 
     logging.info(f"Configuration loaded. Generating {args.total_questions} question/answer pairs using model '{args.model}'.")
-    asyncio.run(main(context))
+    asyncio.run(main(context))