# Provenance (captured Hugging Face page header, converted to comments so the
# file parses as YAML): timm model repository — tags: Image Classification,
# timm, PyTorch, Safetensors. Author: Ross Wightman.
# Commit 34a3fcb: "Add model weights and args". Raw file size: 1.95 kB.
# timm training hyperparameters: an argparse namespace serialized to YAML,
# keys in alphabetical order. `null` and '' mean "use the script/model default".
# NOTE(review): values indicate fine-tuning vit_large_patch14_clip_224.laion2b
# on ImageNet-12k via TFDS (11821 classes) — confirm against the model card.
aa: rand-m9-inc1-mstd101  # RandAugment policy string; in timm, mstd > 100 presumably selects uniform magnitude sampling — verify in timm auto_augment
amp: true  # mixed-precision training on (backend chosen by native_amp/apex_amp flags below — confirm in train.py)
aot_autograd: false
apex_amp: false  # NVIDIA Apex AMP backend disabled
aug_repeats: 0  # no repeated-augmentation sampling
aug_splits: 0  # no augmentation splits (used with JSD loss)
batch_size: 64  # presumably per-process (per-GPU) batch size — confirm
bce_loss: false
bce_target_thresh: null
bn_eps: null  # null => keep model's BatchNorm defaults
bn_momentum: null
channels_last: false
checkpoint_hist: 10  # number of recent checkpoints retained
class_map: ''
clip_grad: 3.0  # gradient clipping threshold
clip_mode: norm  # clip by global norm (vs value/agc)
color_jitter: 0.4
cooldown_epochs: 10  # extra epochs at min_lr after schedule ends
crop_pct: null  # null => use model's default eval crop percentage
cutmix: 0.0  # CutMix disabled
cutmix_minmax: null
data_dir: /data/tfds
dataset: tfds/imagenet12k  # TensorFlow Datasets wrapper dataset
dataset_download: false
decay_epochs: 100  # step-scheduler settings — presumably inert with sched: cosine; confirm
decay_milestones:
- 30
- 60
decay_rate: 0.1
dist_bn: reduce  # reduce BN stats across distributed ranks
drop: 0.0  # no classifier dropout
drop_block: null
drop_connect: null  # deprecated alias of drop_path in timm — confirm
drop_path: 0.2  # stochastic depth rate
epoch_repeats: 0.0
epochs: 80
eval_metric: top1
experiment: ''
fast_norm: false
fuser: ''
gp: null  # null => model's default global pooling
grad_checkpointing: false
hflip: 0.5  # horizontal flip probability
img_size: null  # null => model's default (224 per model name — confirm)
initial_checkpoint: ''
input_size: null
interpolation: ''  # '' => model's default resize interpolation
jsd_loss: false
layer_decay: 0.8  # layer-wise learning-rate decay (common for ViT fine-tuning)
local_rank: 0
log_interval: 50
log_wandb: false
lr: 0.0005  # peak learning rate after warmup
lr_cycle_decay: 0.5
lr_cycle_limit: 1  # single cosine cycle
lr_cycle_mul: 1.0
lr_k_decay: 1.0
lr_noise: null  # no LR noise injection
lr_noise_pct: 0.67
lr_noise_std: 1.0
mean: null  # null => model's default normalization mean
min_lr: 5.0e-07  # cosine schedule floor
mixup: 0.0  # MixUp disabled (remaining mixup_* keys then inert — confirm)
mixup_mode: batch
mixup_off_epoch: 0
mixup_prob: 1.0
mixup_switch_prob: 0.5
model: vit_large_patch14_clip_224.laion2b  # pretrained CLIP ViT-L/14 image tower
model_ema: true  # keep an exponential moving average of weights
model_ema_decay: 0.9999
model_ema_force_cpu: false
momentum: 0.9  # SGD-style momentum — presumably unused by adamw; confirm
native_amp: false
no_aug: false
no_ddp_bb: false
no_prefetcher: false
no_resume_opt: false
num_classes: 11821  # ImageNet-12k label count
opt: adamw
opt_betas: null  # null => optimizer defaults
opt_eps: null
output: ''
patience_epochs: 10  # plateau-scheduler patience — presumably inert with cosine
pin_mem: false
pretrained: true  # start from pretrained weights
ratio:  # RandomResizedCrop aspect-ratio range (3/4 to 4/3)
- 0.75
- 1.3333333333333333
recount: 1  # random-erasing count
recovery_interval: 0
remode: pixel  # random-erasing fill mode
reprob: 0.3  # random-erasing probability
resplit: false
resume: ''
save_images: false
scale:  # RandomResizedCrop area range
- 0.08
- 1.0
sched: cosine  # cosine-annealing LR schedule
seed: 42
smoothing: 0.1  # label smoothing
split_bn: false
start_epoch: null
std: null  # null => model's default normalization std
sync_bn: false
torchscript: false
train_interpolation: random  # randomly pick bilinear/bicubic per sample — confirm in timm transforms
train_split: train
tta: 0  # no test-time augmentation
use_multi_epochs_loader: false
val_split: validation
validation_batch_size: null  # null => fall back to batch_size
vflip: 0.0  # vertical flip disabled
warmup_epochs: 10
warmup_lr: 1.0e-06  # LR at start of warmup
weight_decay: 0.02
worker_seeding: all
workers: 8  # dataloader worker processes