{ "vocab_size_multiple": 8, "tensorboard_log_dir_dated": "tensorboard/Oct-11_12-25-44", "tgt_vocab": "en.eole.vocab", "n_sample": 0, "transforms": [ "sentencepiece", "filtertoolong" ], "valid_metrics": [ "BLEU" ], "src_vocab_size": 32000, "tensorboard": true, "tgt_vocab_size": 32000, "tensorboard_log_dir": "tensorboard", "report_every": 100, "overwrite": true, "share_vocab": false, "src_vocab": "ro.eole.vocab", "seed": 1234, "save_data": "data", "training": { "accum_count": [ 10 ], "batch_type": "tokens", "average_decay": 0.0001, "warmup_steps": 2000, "learning_rate": 2.0, "adam_beta2": 0.998, "compute_dtype": "torch.float16", "decay_method": "noam", "dropout": [ 0.1 ], "max_grad_norm": 0.0, "bucket_size": 128000, "prefetch_factor": 32, "gpu_ranks": [ 0 ], "save_checkpoint_steps": 5000, "accum_steps": [ 0 ], "model_path": "quickmt-ro-en-eole-model", "normalization": "tokens", "attention_dropout": [ 0.1 ], "num_workers": 0, "label_smoothing": 0.1, "param_init_method": "xavier_uniform", "valid_steps": 5000, "keep_checkpoint": 4, "world_size": 1, "batch_size_multiple": 8, "batch_size": 12000, "dropout_steps": [ 0 ], "valid_batch_size": 2048, "optim": "adamw", "train_steps": 100000 }, "model": { "architecture": "transformer", "heads": 8, "transformer_ff": 4096, "position_encoding_type": "SinusoidalInterleaved", "share_embeddings": false, "hidden_size": 1024, "share_decoder_embeddings": true, "encoder": { "heads": 8, "transformer_ff": 4096, "position_encoding_type": "SinusoidalInterleaved", "src_word_vec_size": 1024, "n_positions": null, "encoder_type": "transformer", "layers": 8, "hidden_size": 1024 }, "decoder": { "tgt_word_vec_size": 1024, "heads": 8, "transformer_ff": 4096, "decoder_type": "transformer", "position_encoding_type": "SinusoidalInterleaved", "n_positions": null, "layers": 2, "hidden_size": 1024 }, "embeddings": { "position_encoding_type": "SinusoidalInterleaved", "word_vec_size": 1024, "tgt_word_vec_size": 1024, "src_word_vec_size": 1024 } }, "transforms_configs": { "filtertoolong": { "src_seq_length": 256, "tgt_seq_length": 256 }, "sentencepiece": { "tgt_subword_model": "${MODEL_PATH}/en.spm.model", "src_subword_model": "${MODEL_PATH}/ro.spm.model" } }, "data": { "corpus_1": { "path_sco": "hf://quickmt/quickmt-train.ro-en/sco", "path_tgt": "hf://quickmt/quickmt-train.ro-en/en", "path_align": null, "path_src": "hf://quickmt/quickmt-train.ro-en/ro", "transforms": [ "sentencepiece", "filtertoolong" ] }, "valid": { "path_align": null, "path_tgt": "valid.en", "path_src": "valid.ro", "transforms": [ "sentencepiece", "filtertoolong" ] } } }