Upload 7 files

- config.json (+3 -3)
- merges.txt (+0 -0)

config.json (CHANGED)

@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "…
+  "_name_or_path": "openai/clip-vit-base-patch32",
   "architectures": [
     "CLIPModel"
   ],
@@ -26,7 +26,7 @@
   "finetuning_task": null,
   "forced_bos_token_id": null,
   "forced_eos_token_id": null,
-  "hidden_act": "…
+  "hidden_act": "quick_gelu",
   "hidden_size": 512,
   "id2label": {
     "0": "LABEL_0",
@@ -51,7 +51,7 @@
   "num_attention_heads": 8,
   "num_beam_groups": 1,
   "num_beams": 1,
-  "num_hidden_layers": …
+  "num_hidden_layers": 12,
   "num_return_sequences": 1,
   "output_attentions": false,
   "output_hidden_states": false,
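For a quick sanity check of the values set above, the fields can be read back with the transformers library. This is a minimal sketch, not part of the commit: it assumes transformers is installed and uses the upstream openai/clip-vit-base-patch32 checkpoint named in `_name_or_path`; substitute this repository's id to inspect the uploaded files instead.

```python
# Minimal sketch (not part of this commit): read back the config fields
# touched above. Assumes the `transformers` library and access to the
# upstream checkpoint named in `_name_or_path`.
from transformers import CLIPConfig

config = CLIPConfig.from_pretrained("openai/clip-vit-base-patch32")

# For a CLIPConfig these fields live on the text tower's sub-config.
print(config.text_config.hidden_act)           # quick_gelu
print(config.text_config.hidden_size)          # 512
print(config.text_config.num_hidden_layers)    # 12
print(config.text_config.num_attention_heads)  # 8
```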
    	
merges.txt (ADDED)

The diff for this file is too large to render. See raw diff.
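merges.txt is the byte-pair-encoding merge table read by the CLIP tokenizer, which is why its diff is too large to render line by line. Below is a minimal sketch, purely an illustration assuming the transformers library and the upstream checkpoint id, of where such a file usually comes from.

```python
# Minimal sketch (illustration only): merges.txt is the BPE merge table
# the CLIP tokenizer loads. Assumes `transformers`; the upstream checkpoint
# id is used here purely as an example.
from transformers import CLIPTokenizer

tok = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")

# Saving the tokenizer writes merges.txt (alongside vocab.json) to the
# output directory, which is the usual way the file ends up in a model repo.
saved_files = tok.save_pretrained("clip-tokenizer-out")
print(saved_files)  # includes .../merges.txt and .../vocab.json
```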
