model:
  name: "gpt2"
  max_length: 128
  batch_size: 16
  learning_rate: 2e-5
  num_train_epochs: 3
languages:
  - YORUBA
  - IGBO
  - HAUSA
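
A minimal sketch of how this configuration might be loaded and fed into a Hugging Face fine-tuning setup. The file name `config.yaml`, the output directory, and the loading code itself are assumptions for illustration; only the configuration values above come from the source.

```python
# Sketch: load the YAML config and wire its values into a tokenizer, model,
# and TrainingArguments. Assumes the config is saved as "config.yaml".
import yaml
from transformers import AutoTokenizer, AutoModelForCausalLM, TrainingArguments

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

model_cfg = cfg["model"]

tokenizer = AutoTokenizer.from_pretrained(model_cfg["name"])
tokenizer.model_max_length = model_cfg["max_length"]
model = AutoModelForCausalLM.from_pretrained(model_cfg["name"])

training_args = TrainingArguments(
    output_dir="outputs",                                   # assumed path
    per_device_train_batch_size=model_cfg["batch_size"],
    # PyYAML parses "2e-5" (no decimal point) as a string, so cast explicitly.
    learning_rate=float(model_cfg["learning_rate"]),
    num_train_epochs=model_cfg["num_train_epochs"],
)

print(f"Fine-tuning {model_cfg['name']} on languages: {cfg['languages']}")
```

The `float(...)` cast guards against a common YAML gotcha: scientific-notation values without a decimal point are loaded as strings by PyYAML rather than as floats.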