architecture: GPT2LMHeadModel
conversion_date: '2025-01-09'
format: safetensors
max_position_embeddings: 1024
model_name: RadonSAI-Small
model_type: gpt2
parameters: 124M
source_model: gpt2
vocab_size: 50257
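
A minimal sketch of loading a checkpoint described by this metadata with Hugging Face `transformers`; the local directory name `./RadonSAI-Small` is an assumption for illustration, not a published repo id:

```python
# Loading sketch; the path below is assumed to contain config.json plus the
# safetensors weights produced by the conversion noted above.
from transformers import GPT2LMHeadModel, GPT2TokenizerFast

model_dir = "./RadonSAI-Small"  # assumed local checkpoint directory

# GPT2LMHeadModel matches the `architecture` field; from_pretrained reads the
# config and safetensors checkpoint from the directory.
model = GPT2LMHeadModel.from_pretrained(model_dir)
tokenizer = GPT2TokenizerFast.from_pretrained(model_dir)  # gpt2 tokenizer, vocab_size 50257

prompt = "Hello, world"
inputs = tokenizer(prompt, return_tensors="pt")
# Context is limited by max_position_embeddings (1024 tokens).
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```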