File size: 1,315 Bytes
								{
  "_name_or_path": "facebook/hiera-huge-224-hf",
  "architectures": [
    "HieraModel"
  ],
  "decoder_depth": null,
  "decoder_hidden_size": null,
  "decoder_num_heads": null,
  "depths": [
    2,
    6,
    36,
    4
  ],
  "drop_path_rate": 0.0,
  "embed_dim": 256,
  "embed_dim_multiplier": 2.0,
  "hidden_act": "gelu",
  "hidden_size": 2048,
  "image_size": [
    224,
    224
  ],
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-06,
  "layer_norm_init": 1.0,
  "mask_ratio": 0.6,
  "masked_unit_attention": [
    true,
    true,
    false,
    false
  ],
  "masked_unit_size": [
    8,
    8
  ],
  "mlp_ratio": 4.0,
  "model_type": "hiera",
  "norm_pix_loss": true,
  "normalize_pixel_loss": true,
  "num_channels": 3,
  "num_heads": [
    4,
    8,
    16,
    32
  ],
  "num_layers": 4,
  "num_query_pool": 3,
  "out_features": [
    "stage4"
  ],
  "out_indices": [
    4
  ],
  "patch_padding": [
    3,
    3
  ],
  "patch_size": [
    7,
    7
  ],
  "patch_stride": [
    4,
    4
  ],
  "query_stride": [
    2,
    2
  ],
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4"
  ],
  "transformers_version": "4.43.4",
  "use_separate_position_embedding": false,
  "transformers.js_config": {
    "use_external_data_format": {
      "model.onnx": true
    }
  }
}
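
For reference, a minimal sketch of how a config file like this is typically consumed with the transformers library (assuming an install recent enough to include Hiera support, per the transformers_version field above; the model id is taken from _name_or_path):

from transformers import AutoConfig, AutoModel

# Load the configuration shown above directly from the Hub.
config = AutoConfig.from_pretrained("facebook/hiera-huge-224-hf")
print(config.model_type)  # "hiera"
print(config.depths)      # [2, 6, 36, 4]
print(config.num_heads)   # [4, 8, 16, 32]

# Instantiate the architecture from the config alone (randomly
# initialized weights); use AutoModel.from_pretrained(...) instead
# to also download the pretrained checkpoint.
model = AutoModel.from_config(config)

The trailing transformers.js_config block is read by Transformers.js rather than the Python library; its use_external_data_format entry appears to indicate that the exported model.onnx is large enough to require ONNX's external-data format rather than a single protobuf file.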