
Created using Colaboratory

Maxime Labonne 1 year ago
parent
commit
20f0fc67e9
1 file changed with 7 additions and 7 deletions
  1. Quantize_Llama_2_models_using_ggml.ipynb (+7 −7)

+ 7 - 7
Quantize_Llama_2_models_using_ggml.ipynb

@@ -5,7 +5,7 @@
     "colab": {
       "provenance": [],
       "gpuType": "T4",
-      "authorship_tag": "ABX9TyP56xEUptBvw5HFOchVYu+H",
+      "authorship_tag": "ABX9TyMEXNClg5PlR3HYYJSGabFJ",
       "include_colab_link": true
     },
     "kernelspec": {
@@ -1066,7 +1066,7 @@
         "id": "fD24jJxq7t3k",
         "outputId": "94954934-0829-44e9-a5e5-262c17e162d0"
       },
-      "execution_count": 51,
+      "execution_count": null,
       "outputs": [
         {
           "output_type": "stream",
@@ -1722,7 +1722,7 @@
         "id": "vNPL9WYg78l-",
         "outputId": "3c3e7d2f-f0de-429d-fd97-dab480bc514a"
       },
-      "execution_count": 54,
+      "execution_count": null,
       "outputs": [
         {
           "output_type": "stream",
@@ -2112,7 +2112,7 @@
       "source": [
         "## Push to hub\n",
         "\n",
-        "To push your model to the hub, run the following blocks. It will create a new repo with the \"-GGML\" suffix. Don't forget to change the `username` variable in the following block."
+        "To push your model to the hub, run the following blocks. It will create a new repo with the \"-GGUF\" suffix. Don't forget to change the `username` variable in the following block."
       ],
       "metadata": {
         "id": "Ar8pO7bb80US"
@@ -2170,7 +2170,7 @@
         "id": "UOyKfUD-8jmh",
         "outputId": "3c8df47b-f350-4251-a19f-4b9fb1116381"
       },
-      "execution_count": 55,
+      "execution_count": null,
       "outputs": [
         {
           "output_type": "stream",
@@ -2202,11 +2202,11 @@
         "api = HfApi()\n",
         "\n",
         "\n",
-        "create_repo(repo_id = f\"{username}/{MODEL_NAME}-GGML\", repo_type=\"model\", exist_ok=True)\n",
+        "create_repo(repo_id = f\"{username}/{MODEL_NAME}-GGUF\", repo_type=\"model\", exist_ok=True)\n",
         "\n",
         "api.upload_folder(\n",
         "    folder_path=MODEL_NAME,\n",
-        "    repo_id=f\"{username}/{MODEL_NAME}-GGML\",\n",
+        "    repo_id=f\"{username}/{MODEL_NAME}-GGUF\",\n",
         "    allow_patterns=f\"*{GGML_VERSION}*\",\n",
         ")"
       ],