{
  "_name_or_path": "meta-llama/Meta-Llama-3.1-8B-Instruct",
  "architectures": [
    "ProActLlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "attn_implementation": "flash_attention_2",
  "binary_decision_head_type": "linear",
  "binary_loss_weight": 0.1,
  "bor_token_id": 128006,
  "bos_token_id": 128000,
  "chat_formatter_cls": "LLaMA3MultimodalChat",
  "eos_loss_weight": 1.0,
  "eos_token": "<|eot_id|>",
  "eos_token_id": 128009,
  "exceed_context_handling": "drop_all",
  "hidden_act": "silu",
  "hidden_size": 4096,
  "ignore_id": -100,
  "img_patch_token_layer": -2,
  "img_patch_token_size": 2,
  "img_resolution": null,
  "img_sep_token": "",
  "img_sep_token_id": null,
  "img_token": "<image>",
  "img_token_id": 128256,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "llm_pretrained": "meta-llama/Meta-Llama-3.1-8B-Instruct",
  "max_position_embeddings": 131072,
  "max_seq_len": 4096,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "padding_side": "right",
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 8.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "training_args": {
    "accelerator_config": {
      "dispatch_batches": null,
      "even_batches": true,
      "gradient_accumulation_kwargs": null,
      "non_blocking": false,
      "split_batches": false,
      "use_seedable_sampler": true
    },
    "adafactor": false,
    "adam_beta1": 0.9,
    "adam_beta2": 0.999,
    "adam_epsilon": 1e-08,
    "auto_find_batch_size": false,
    "batch_eval_metrics": false,
    "bf16": true,
    "bf16_full_eval": false,
    "data_root_dir": "/fsx_0/user/imzyc/processed_data/",
    "data_seed": null,
    "dataloader_drop_last": false,
    "dataloader_num_workers": 2,
    "dataloader_persistent_workers": false,
    "dataloader_pin_memory": true,
    "dataloader_prefetch_factor": 2,
    "ddp_backend": null,
    "ddp_broadcast_buffers": null,
    "ddp_bucket_cap_mb": null,
    "ddp_find_unused_parameters": null,
    "ddp_timeout": 1800,
    "debug": [],
    "deepspeed": "deepspeed/zero2_offload.json",
    "disable_tqdm": false,
    "dispatch_batches": null,
    "do_eval": true,
    "do_predict": false,
    "do_train": false,
    "eval_accumulation_steps": null,
    "eval_datasets": "sthsthv2/narration_val_L4096_I5,ego4d/dialog-klg-sum_val_L4096_I5,holoassist/dialog-klg-sum_val_L4096_I5,ego4d/narration_val_L4096_I5,ego4d/summary_val_L4096_I5",
    "eval_delay": 0,
    "eval_do_concat_batches": true,
    "eval_on_start": false,
    "eval_steps": 400,
    "eval_strategy": "steps",
    "eval_use_gather_object": false,
    "evaluation_strategy": "steps",
    "finetune_modules": "mm_projector",
    "fp16": false,
    "fp16_backend": "auto",
    "fp16_full_eval": false,
    "fp16_opt_level": "O1",
    "fsdp": [],
    "fsdp_config": {
      "min_num_params": 0,
      "xla": false,
      "xla_fsdp_grad_ckpt": false,
      "xla_fsdp_v2": false
    },
    "fsdp_min_num_params": 0,
    "fsdp_transformer_layer_cls_to_wrap": null,
    "full_determinism": false,
    "gradient_accumulation_steps": 1,
    "gradient_checkpointing": true,
    "gradient_checkpointing_kwargs": null,
    "greater_is_better": null,
    "group_by_length": false,
    "half_precision_backend": "auto",
    "hub_always_push": false,
    "hub_model_id": null,
    "hub_private_repo": false,
    "hub_strategy": "every_save",
    "hub_token": "<HUB_TOKEN>",
    "ignore_data_skip": false,
    "include_inputs_for_metrics": false,
    "include_num_input_tokens_seen": false,
    "include_tokens_per_second": false,
    "is_debug": false,
    "jit_mode_eval": false,
    "label_names": null,
    "label_smoothing_factor": 0.0,
    "learning_rate": 0.0002,
    "length_column_name": "length",
    "llm_train_mode": "lora",
    "load_best_model_at_end": false,
    "local_rank": 0,
    "log_level": "passive",
    "log_level_replica": "warning",
    "log_on_each_node": true,
    "logging_dir": "/fsx_0/user/imzyc/proact_exps/20240822-L4096-I5-ep4-NOSEP-nr0.1-klgmix-1s-lora-bs384-debug/runs/Aug22_02-03-56_h100-st-p548xlarge-5",
    "logging_first_step": false,
    "logging_nan_inf_filter": true,
    "logging_steps": 10,
    "logging_strategy": "steps",
    "lora_alpha": 256,
    "lora_modules": "model.*(q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj)|lm_head$",
    "lora_r": 128,
    "lr_scheduler_kwargs": {},
    "lr_scheduler_type": "cosine",
    "max_grad_norm": 1.0,
    "max_steps": -1,
    "metric_for_best_model": null,
    "mp_parameters": "",
    "neftune_noise_alpha": null,
    "neg_frame_sampling_rate": 0.1,
    "no_cuda": false,
    "num_train_epochs": 4.0,
    "optim": "adamw_torch",
    "optim_args": null,
    "optim_target_modules": null,
    "output_dir": "/fsx_0/user/imzyc/proact_exps/20240822-L4096-I5-ep4-NOSEP-nr0.1-klgmix-1s-lora-bs384-debug",
    "overwrite_output_dir": false,
    "past_index": -1,
    "per_device_eval_batch_size": 4,
    "per_device_train_batch_size": 4,
    "per_gpu_eval_batch_size": null,
    "per_gpu_train_batch_size": null,
    "prediction_loss_only": false,
    "push_to_hub": false,
    "push_to_hub_model_id": null,
    "push_to_hub_organization": null,
    "push_to_hub_token": "<PUSH_TO_HUB_TOKEN>",
    "ray_scope": "last",
    "remove_unused_columns": true,
    "report_to": [
      "tensorboard",
      "wandb"
    ],
    "restore_callback_states_from_checkpoint": false,
    "resume_from_checkpoint": null,
    "run_name": "20240822-L4096-I5-ep4-NOSEP-nr0.1-klgmix-1s-lora-bs384-debug",
    "save_on_each_node": false,
    "save_only_model": false,
    "save_safetensors": true,
    "save_steps": 500,
    "save_strategy": "epoch",
    "save_total_limit": 1,
    "seed": 42,
    "skip_memory_metrics": true,
    "split_batches": null,
    "tf32": true,
    "torch_compile": false,
    "torch_compile_backend": null,
    "torch_compile_mode": null,
    "torch_empty_cache_steps": null,
    "torchdynamo": null,
    "tpu_metrics_debug": false,
    "tpu_num_cores": null,
    "train_datasets": "ego4d/narration_train_L4096_I5,sthsthv2/narration_train_L4096_I5@10,llava/caption_train_L4096_I5@2,egoobjects/detection_train_L4096_I5@20,ego4d/summary_train_L4096_I5@2,holoassist/summary_train_L4096_I5@2,epickitchens/summary_train_L4096_I5@2,egoexolearn/summary_train_L4096_I5@2,wtag/summary_train_L4096_I5@2,assembly101/summary_train_L4096_I5@2,ego4d/dialog-klg-sum_train_L4096_I5@2,holoassist/dialog-klg-sum_train_L4096_I5@2,epickitchens/dialog-klg-sum_train_L4096_I5@2,egoexolearn/dialog-klg-sum_train_L4096_I5@2,wtag/dialog-klg-sum_train_L4096_I5@5,assembly101/dialog-klg-sum_train_L4096_I5@2",
    "use_cpu": false,
    "use_ipex": false,
    "use_legacy_prediction_loop": false,
    "use_mps_device": false,
    "use_pose": false,
    "warmup_ratio": 0.0,
    "warmup_steps": 100,
    "weight_decay": 0.0
  },
  "transformers_version": "4.43.1",
  "use_binary_decision_head": false,
  "use_cache": true,
  "use_img_cls_token": true,
  "use_pose": false,
  "vision_hidden_size": 1152,
  "vision_pretrained": "google/siglip-so400m-patch14-384",
  "vocab_size": 128256,
  "w2t_logit_weight": 1.0
}
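
Below is a minimal sketch of how this config might be inspected, and how the `lora_modules` regex under `training_args` selects LoRA target modules. It assumes only that `config.json` has been downloaded locally; the module names passed to the regex are illustrative examples of the standard Llama naming scheme (not values confirmed by this repository), and applying the pattern with `re.search` is likewise an assumption, since the custom `ProActLlamaForCausalLM` training code is not shown here.

```python
# Sketch: read fields from a local copy of this config.json and demonstrate
# the "lora_modules" target-selection regex. Assumptions: config.json is in
# the working directory; module names below are illustrative Llama-style
# names; the training code's exact matching function (search vs. fullmatch)
# is unknown, so re.search is used here for demonstration.
import json
import re

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["model_type"])                 # llama
print(cfg["rope_scaling"]["rope_type"])  # llama3
print(cfg["vision_pretrained"])          # google/siglip-so400m-patch14-384
print(cfg["training_args"]["llm_train_mode"])  # lora

# The pattern targets the attention/MLP projections plus lm_head.
pattern = re.compile(cfg["training_args"]["lora_modules"])
for name in [
    "model.layers.0.self_attn.q_proj",  # attention projection -> matched
    "model.layers.31.mlp.down_proj",    # MLP projection -> matched
    "lm_head",                          # matched via the lm_head$ alternative
    "model.embed_tokens",               # embeddings -> not matched
]:
    print(f"{name}: {bool(pattern.search(name))}")
```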