testing adadelta optimizer

2025-12-18 22:24:01 -05:00
parent 5c89149388
commit 0385b0acc8


@@ -11,12 +11,12 @@ DEVICE = torch.accelerator.current_accelerator() if torch.accelerator.is_availab
 # Valves
 DIMENSIONS = 1
-TRAIN_STEPS = 20000
+TRAIN_STEPS = 25000
 TRAIN_BATCHSZ = 16384
-TRAIN_PROGRESS = 500
+TRAIN_PROGRESS = 100
 BATCH_LOWER = -512.0
 BATCH_UPPER = 512.0
-DO_VERBOSE_EARLY_TRAIN = True
+DO_VERBOSE_EARLY_TRAIN = False

 def get_torch_info():
     log.info("PyTorch Version: %s", torch.__version__)
@@ -86,7 +86,8 @@ def training_entry():
     set_seed(0)
     model = PairwiseComparator(d=DIMENSIONS).to(DEVICE)
-    opt = torch.optim.AdamW(model.parameters(), lr=2e-3)
+    # opt = torch.optim.AdamW(model.parameters(), lr=2e-3)
+    opt = torch.optim.Adadelta(model.parameters(), lr=1.0)

     # 4) Train
     for step in range(TRAIN_STEPS):
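
For context on the optimizer swap in the second hunk: PyTorch's Adadelta keeps running averages of squared gradients and of squared parameter updates and derives a per-parameter step size from their ratio, so lr=1.0 (the library default) only rescales that derived step rather than acting as a hand-tuned rate like AdamW's 2e-3. Below is a minimal, self-contained sketch of the same change, using a placeholder nn.Linear model in place of the repo's PairwiseComparator, which is not shown in this diff.

import torch
import torch.nn as nn

# Placeholder model standing in for PairwiseComparator (assumption: any
# nn.Module with trainable parameters behaves the same way here).
model = nn.Linear(1, 1)

# Before (from the diff): AdamW with an explicitly tuned learning rate.
# opt = torch.optim.AdamW(model.parameters(), lr=2e-3)

# After (from the diff): Adadelta. lr=1.0 is the PyTorch default; the
# effective step comes from running averages of squared gradients (rho)
# and squared updates, so lr only rescales that derived step.
opt = torch.optim.Adadelta(model.parameters(), lr=1.0, rho=0.9, eps=1e-6)

# One optimization step with a dummy regression loss, mirroring the
# shape of the training loop in the diff.
x = torch.randn(16, 1)
loss = (model(x) - 2.0 * x).pow(2).mean()
opt.zero_grad()
loss.backward()
opt.step()

Because Adadelta adapts its own step sizes, it is a common first check when a run is sensitive to AdamW's learning rate; the smaller TRAIN_PROGRESS value in the first hunk presumably just reports progress more often while the new optimizer is being evaluated.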