gated-david / best_model.json
{
"model_name": "David-decoupled-cantor_scale",
"run_id": "20251104_154540",
"timestamp": "2025-11-04T15:57:33.702682",
"best_val_acc": 78.904,
"best_epoch": 4,
"final_train_acc": 86.63156325443911,
"final_train_loss": 1.4550095543688029,
"scale_accuracies": {
"256": 74.616,
"512": 77.184,
"768": 77.982,
"1024": 77.986,
"2048": 77.906,
"4096": 77.97
},
"architecture": {
"preset": "clip_vit_b16_cantor_big_window",
"sharing_mode": "decoupled",
"fusion_mode": "cantor_scale",
"scales": [
256,
512,
768,
1024,
2048,
4096
],
"feature_dim": 512,
"num_classes": 1000,
"use_belly": true,
"belly_expand": 2.0
},
"training": {
"dataset": "AbstractPhil/imagenet-clip-features-orderly",
"model_variant": "clip_vit_b16",
"num_epochs": 5,
"batch_size": 512,
"learning_rate": 0.001,
"rose_weight": "0.1\u21920.5",
"cayley_loss": false,
"optimizer": "AdamW",
"scheduler": "cosine_restarts"
},
"files": {
"weights_safetensors": "weights/David-decoupled-cantor_scale/20251104_154540/best_model_acc78.90.safetensors",
"weights_pytorch": "weights/David-decoupled-cantor_scale/20251104_154540/best_model.pth",
"config": "weights/David-decoupled-cantor_scale/20251104_154540/david_config.json",
"training_config": "weights/David-decoupled-cantor_scale/20251104_154540/train_config.json",
"tensorboard": "runs/David-decoupled-cantor_scale/20251104_154540/"
}
}
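
The "files" block records where the checkpoints and configs for this run live. Below is a minimal sketch for pulling and inspecting the best-model safetensors weights; the repo id "AbstractPhil/gated-david" is an assumption (it is not stated in this file), and the sketch assumes the listed paths sit at the repository root.

from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

REPO_ID = "AbstractPhil/gated-david"  # assumed repo id; adjust to the actual repository
WEIGHTS = "weights/David-decoupled-cantor_scale/20251104_154540/best_model_acc78.90.safetensors"

# Download the checkpoint from the Hub and load it as a plain state dict.
weights_path = hf_hub_download(repo_id=REPO_ID, filename=WEIGHTS)
state_dict = load_file(weights_path)  # maps tensor name -> torch.Tensor

# Print a few tensor names and shapes to sanity-check the checkpoint.
for name, tensor in list(state_dict.items())[:5]:
    print(name, tuple(tensor.shape))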