
Update utils_llama.py

Allen 1 year ago
parent
commit
0824531973
1 changed file with 0 additions and 3 deletions
  1. research/long-context-llama/H2O/utils_llama.py (+0, -3)

+ 0 - 3  research/long-context-llama/H2O/utils_llama.py

@@ -217,9 +217,6 @@ class H2OLlamaAttention(nn.Module):
         if not output_attentions:
             attn_weights = None
         
-        if self.layer_idx == 0:
-            print(past_key_value.key_cache[0].shape, hidden_states.shape, past_key_value.value_cache[0].shape, past_key_value.accumulated_attention_scores[0][0,0,0].item())
-
         return attn_output, attn_weights, past_key_value
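For context, the removed lines were a layer-0 debug print of the H2O cache state: the retained key/value cache shapes and the running accumulated attention scores used for heavy-hitter eviction. If that information is still needed, it can be read from the returned cache on the caller side rather than printed inside the attention forward. The sketch below is illustrative only: it assumes a Llama model whose attention modules have already been swapped for H2OLlamaAttention from utils_llama.py (so the cache exposes key_cache, value_cache, and accumulated_attention_scores, as in the removed print); the checkpoint name and setup code are assumptions, not part of this commit.

    # Sketch only: inspect the H2O cache after a forward pass instead of
    # printing from inside H2OLlamaAttention.forward. Assumes the model's
    # attention layers have already been replaced with H2OLlamaAttention
    # (patching step not shown); the checkpoint name is an assumption.
    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_name = "meta-llama/Llama-2-7b-hf"  # assumed checkpoint
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)

    inputs = tokenizer("The quick brown fox", return_tensors="pt")
    with torch.no_grad():
        out = model(**inputs, use_cache=True)

    cache = out.past_key_values  # H2O cache object built by the patched attention
    layer = 0
    print(cache.key_cache[layer].shape)    # keys retained for layer 0
    print(cache.value_cache[layer].shape)  # values retained for layer 0
    # Accumulated attention score for batch 0, head 0, first cached position
    # (indexing mirrors the removed debug print)
    print(cache.accumulated_attention_scores[layer][0, 0, 0].item())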