@@ -5,7 +5,7 @@
"id": "35a81947",
"metadata": {},
"source": [
- "[](https://github.com/meta-llama/llama-cookbook/blob/ibm-wxai/3p-integrations/ibm/Get%20Started%20with%20watsonx.ai%20%26%20Llama.ipynb)\n"
+ "[](https://colab.research.google.com/github/meta-llama/llama-cookbook/blob/ibm-wxai/3p-integrations/ibm/Get%20Started%20with%20watsonx.ai%20%26%20Llama.ipynb)\n"
]
},
{
@@ -23,7 +23,7 @@
"- Understanding key parameters\n",
"- Building practical examples\n",
"\n",
- "By the end of this notebook, you'll be comfortable using Llama models for various text generation tasks on watsonx.ai.\n",
+ "By the end of this notebook, you'll be comfortable using Llama models for various text generation tasks on watsonx.ai!\n",
"\n",
"## Prerequisites\n",
"\n",