terraform.tfvars

# Example terraform.tfvars for a minimal Amazon SageMaker deployment
# Copy this file to terraform.tfvars and customize as needed

# AWS Configuration
aws_region = "us-west-2"

# Project Configuration
project_name = "my-llama-api"
environment  = "dev"
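# project_name and environment are presumably combined to name and tag the
# created resources; the exact usage depends on the module's variables.tf.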

# Model Configuration
model_image_uri = "763104351884.dkr.ecr.us-west-2.amazonaws.com/huggingface-pytorch-inference:2.6.0-transformers4.51.3-gpu-py312-cu124-ubuntu22.04"
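# Note: AWS Deep Learning Container images are pulled from a region-specific
# ECR registry. If you change aws_region above, update the region (and, if
# needed, the account ID) embedded in this URI to match.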
model_data_s3_path = "s3://llama-model-demo-bucket/model.tar.gz"
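# The archive is expected to follow SageMaker's model.tar.gz convention for
# the Hugging Face inference container: model weights, config.json, and
# tokenizer files at the root, plus an optional code/ directory containing a
# custom inference.py handler.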
model_name = "Llama-3.2-1B-Instruct"

# Instance Configuration
instance_type = "ml.p4d.24xlarge" # GPU instance for Llama models; large enough for bigger variants too
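# A 1B-parameter model does not require a p4d; a smaller (and much cheaper)
# GPU instance such as the commented-out alternative below should be
# sufficient, assuming the G5 family is available in your region:
# instance_type = "ml.g5.2xlarge"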
initial_instance_count = 1
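# Number of instances behind the endpoint at creation. Scaling beyond this
# (e.g. SageMaker endpoint autoscaling via Application Auto Scaling) is
# configured outside this file.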