@@ -15,7 +15,7 @@
    "source": [
     "# Llama Guard 3 Customization: Taxonomy Customization, Zero/Few-shot prompting, Evaluation and Fine Tuning \n",
     "\n",
-    "<a target=\"_blank\" href=\"https://colab.research.google.com/githubmeta-llama/llama-recipes/blob/main/recipes/responsible_ai/llama_guard/llama_guard_customization_via_prompting_and_fine_tuning.ipynb\">\n",
+    "<a target=\"_blank\" href=\"https://colab.research.google.com/github/meta-llama/llama-recipes/blob/main/recipes/responsible_ai/llama_guard/llama_guard_customization_via_prompting_and_fine_tuning.ipynb\">\n",
     " <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/>\n",
     "</a>\n",
     "\n",