Okay, let's turn no_grad back on. We'll worry about that when tinygrad training works.

This commit is contained in:
Nel Nibcord
2024-12-06 04:49:46 -08:00
parent b7bbda3348
commit bcf87e79b7

View File

@@ -16,7 +16,7 @@ from .losses import length_masked_ce_loss
from collections import OrderedDict
import asyncio
Tensor.no_grad = False
Tensor.no_grad = True
# default settings
TEMPERATURE = int(os.getenv("TEMPERATURE", 0.85))
TOP_K = 25