{ "absolute_positional_embedding_type": null, "add_marker_tokens": true, "architectures": [ "LirDprTiteModel" ], "backbone_model_type": "tite", "doc_length": 256, "doc_pooling_strategy": "first", "dropout_prob": 0.1, "embedding_dim": null, "hidden_act": "gelu_pytorch_tanh", "hidden_sizes": [ 768, 768, 768, 768, 768, 768, 768, 768, 768, 768, 768, 768 ], "initializer_range": 0.02, "intermediate_sizes": [ 3072, 3072, 3072, 3072, 3072, 3072, 3072, 3072, 3072, 3072, 3072, 3072 ], "kernel_sizes": [ null, null, null, 2, 2, 2, 2, 2, 2, 2, 2, 2 ], "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "lir-dpr", "norm_location": "post", "norm_type": "layer", "normalize": false, "num_attention_heads": [ 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12 ], "num_hidden_layers": 12, "pad_token_id": null, "pooling_implementation": "triton", "pooling_location": "intra", "positional_embedding_type": null, "projection": null, "query_length": 32, "query_pooling_strategy": "first", "relative_positional_embedding_type": "rotary", "rope_implementation": "eager", "rotary_interleaved": true, "save_step": 10006, "similarity_function": "dot", "sparsification": null, "strides": [ null, null, null, 2, 2, 2, 2, 2, 2, 2, 2, 2 ], "torch_dtype": "float32", "transformers_version": "4.52.4", "vocab_size": 30528 }