Upload acta anonymizer adapter - Latest (v20250914_103007)
{
"return_dict": true,
"output_hidden_states": false,
"torchscript": false,
"dtype": "float32",
"pruned_heads": {},
"tie_word_embeddings": true,
"chunk_size_feed_forward": 0,
"is_encoder_decoder": false,
"is_decoder": false,
"cross_attention_hidden_size": null,
"add_cross_attention": false,
"tie_encoder_decoder": false,
"architectures": [
"XLMRobertaForTokenClassification"
],
"finetuning_task": null,
"id2label": {
"0": "O",
"5": "B-DATA_NASTERII",
"6": "I-DATA_NASTERII",
"1": "B-NUME_PRENUME",
"2": "I-NUME_PRENUME",
"3": "B-CNP",
"29": "B-PROFESIE",
"30": "I-PROFESIE",
"31": "B-ACTIVITATE",
"32": "I-ACTIVITATE",
"13": "B-ADRESA",
"14": "I-ADRESA",
"25": "B-ORAS_NASTERE",
"23": "B-COD_POSTAL",
"24": "I-COD_POSTAL",
"17": "B-TELEFON_MOBIL",
"21": "B-EMAIL",
"22": "I-EMAIL",
"63": "B-USERNAME",
"43": "B-CONT_BANCAR",
"39": "B-EDUCATIE",
"40": "I-EDUCATIE",
"9": "B-NATIONALITATE",
"37": "B-STARE_CIVILA",
"41": "B-IBAN",
"49": "B-BULETIN",
"53": "B-ASIGURARE_MEDICALA",
"47": "B-PASAPORT",
"27": "B-TARA_NASTERE",
"33": "B-ANGAJATOR",
"34": "I-ANGAJATOR",
"15": "B-ADRESA_LUCRU",
"16": "I-ADRESA_LUCRU",
"35": "B-VENIT",
"36": "I-VENIT",
"59": "B-CONDITII_MEDICALE",
"19": "B-TELEFON_FIX",
"45": "B-CARD_NUMBER",
"46": "I-CARD_NUMBER",
"75": "B-WALLET_CRYPTO",
"69": "B-NUMAR_CONTRACT",
"70": "I-NUMAR_CONTRACT",
"73": "B-CONT_DIGITAL",
"74": "I-CONT_DIGITAL",
"50": "I-BULETIN",
"55": "B-GRUPA_SANGE",
"56": "I-GRUPA_SANGE",
"71": "B-NUMAR_PLACA",
"72": "I-NUMAR_PLACA",
"11": "B-LIMBA_VORBITA",
"12": "I-LIMBA_VORBITA",
"79": "B-SEGMENT",
"80": "I-SEGMENT",
"26": "I-ORAS_NASTERE",
"54": "I-ASIGURARE_MEDICALA",
"65": "B-DEVICE_ID",
"81": "B-EXPUS_POLITIC",
"82": "I-EXPUS_POLITIC",
"83": "B-STATUT_FATCA",
"84": "I-STATUT_FATCA",
"10": "I-NATIONALITATE",
"42": "I-IBAN",
"7": "B-SEX",
"67": "B-BIOMETRIC",
"68": "I-BIOMETRIC",
"57": "B-ALERGII",
"8": "I-SEX",
"48": "I-PASAPORT",
"51": "B-NUMAR_LICENTA",
"60": "I-CONDITII_MEDICALE",
"28": "I-TARA_NASTERE",
"58": "I-ALERGII",
"61": "B-IP_ADDRESS",
"62": "I-IP_ADDRESS",
"18": "I-TELEFON_MOBIL",
"20": "I-TELEFON_FIX",
"44": "I-CONT_BANCAR",
"4": "I-CNP",
"76": "I-WALLET_CRYPTO",
"52": "I-NUMAR_LICENTA",
"38": "I-STARE_CIVILA",
"64": "I-USERNAME",
"66": "I-DEVICE_ID",
"77": "B-NUMAR_CONT_ALT",
"78": "I-NUMAR_CONT_ALT"
},
"label2id": {
"O": 0,
"B-DATA_NASTERII": 5,
"I-DATA_NASTERII": 6,
"B-NUME_PRENUME": 1,
"I-NUME_PRENUME": 2,
"B-CNP": 3,
"B-PROFESIE": 29,
"I-PROFESIE": 30,
"B-ACTIVITATE": 31,
"I-ACTIVITATE": 32,
"B-ADRESA": 13,
"I-ADRESA": 14,
"B-ORAS_NASTERE": 25,
"B-COD_POSTAL": 23,
"I-COD_POSTAL": 24,
"B-TELEFON_MOBIL": 17,
"B-EMAIL": 21,
"I-EMAIL": 22,
"B-USERNAME": 63,
"B-CONT_BANCAR": 43,
"B-EDUCATIE": 39,
"I-EDUCATIE": 40,
"B-NATIONALITATE": 9,
"B-STARE_CIVILA": 37,
"B-IBAN": 41,
"B-BULETIN": 49,
"B-ASIGURARE_MEDICALA": 53,
"B-PASAPORT": 47,
"B-TARA_NASTERE": 27,
"B-ANGAJATOR": 33,
"I-ANGAJATOR": 34,
"B-ADRESA_LUCRU": 15,
"I-ADRESA_LUCRU": 16,
"B-VENIT": 35,
"I-VENIT": 36,
"B-CONDITII_MEDICALE": 59,
"B-TELEFON_FIX": 19,
"B-CARD_NUMBER": 45,
"I-CARD_NUMBER": 46,
"B-WALLET_CRYPTO": 75,
"B-NUMAR_CONTRACT": 69,
"I-NUMAR_CONTRACT": 70,
"B-CONT_DIGITAL": 73,
"I-CONT_DIGITAL": 74,
"I-BULETIN": 50,
"B-GRUPA_SANGE": 55,
"I-GRUPA_SANGE": 56,
"B-NUMAR_PLACA": 71,
"I-NUMAR_PLACA": 72,
"B-LIMBA_VORBITA": 11,
"I-LIMBA_VORBITA": 12,
"B-SEGMENT": 79,
"I-SEGMENT": 80,
"I-ORAS_NASTERE": 26,
"I-ASIGURARE_MEDICALA": 54,
"B-DEVICE_ID": 65,
"B-EXPUS_POLITIC": 81,
"I-EXPUS_POLITIC": 82,
"B-STATUT_FATCA": 83,
"I-STATUT_FATCA": 84,
"I-NATIONALITATE": 10,
"I-IBAN": 42,
"B-SEX": 7,
"B-BIOMETRIC": 67,
"I-BIOMETRIC": 68,
"B-ALERGII": 57,
"I-SEX": 8,
"I-PASAPORT": 48,
"B-NUMAR_LICENTA": 51,
"I-CONDITII_MEDICALE": 60,
"I-TARA_NASTERE": 28,
"I-ALERGII": 58,
"B-IP_ADDRESS": 61,
"I-IP_ADDRESS": 62,
"I-TELEFON_MOBIL": 18,
"I-TELEFON_FIX": 20,
"I-CONT_BANCAR": 44,
"I-CNP": 4,
"I-WALLET_CRYPTO": 76,
"I-NUMAR_LICENTA": 52,
"I-STARE_CIVILA": 38,
"I-USERNAME": 64,
"I-DEVICE_ID": 66,
"B-NUMAR_CONT_ALT": 77,
"I-NUMAR_CONT_ALT": 78
},
"task_specific_params": null,
"problem_type": null,
"tokenizer_class": null,
"prefix": null,
"bos_token_id": 0,
"pad_token_id": 1,
"eos_token_id": 2,
"sep_token_id": null,
"decoder_start_token_id": null,
"max_length": 20,
"min_length": 0,
"do_sample": false,
"early_stopping": false,
"num_beams": 1,
"num_beam_groups": 1,
"diversity_penalty": 0.0,
"temperature": 1.0,
"top_k": 50,
"top_p": 1.0,
"typical_p": 1.0,
"repetition_penalty": 1.0,
"length_penalty": 1.0,
"no_repeat_ngram_size": 0,
"encoder_no_repeat_ngram_size": 0,
"bad_words_ids": null,
"num_return_sequences": 1,
"output_scores": false,
"return_dict_in_generate": false,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"remove_invalid_values": false,
"exponential_decay_length_penalty": null,
"suppress_tokens": null,
"begin_suppress_tokens": null,
"_name_or_path": "EvanD/xlm-roberta-base-romanian-ner-ronec",
"transformers_version": "4.56.1",
"model_type": "xlm-roberta",
"output_past": true,
"tf_legacy_loss": false,
"use_bfloat16": false,
"vocab_size": 250002,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"hidden_act": "gelu",
"intermediate_size": 3072,
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 514,
"type_vocab_size": 1,
"initializer_range": 0.02,
"layer_norm_eps": 1e-05,
"position_embedding_type": "absolute",
"use_cache": true,
"classifier_dropout": null,
"output_attentions": false
}
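
For reference, a minimal sketch of running a checkpoint described by this config with Hugging Face transformers. Only the architecture (XLMRobertaForTokenClassification), the base model name (EvanD/xlm-roberta-base-romanian-ner-ronec) and the BIO label set are taken from the config above; the model directory path, the sample sentence and the masking step are illustrative assumptions, not part of this upload.

# Minimal usage sketch. "path/to/acta-anonymizer" is a hypothetical placeholder
# for wherever this checkpoint actually lives; the sample text is made up.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "path/to/acta-anonymizer"  # placeholder path / repo id

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# aggregation_strategy="simple" merges B-/I- tokens into whole entity spans,
# e.g. B-NUME_PRENUME + I-NUME_PRENUME -> one NUME_PRENUME span.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

text = "Ion Popescu, CNP 1960101123456, locuieste pe Strada Exemplu 10, Bucuresti."
entities = ner(text)

# Anonymize by replacing each detected span with its label, working backwards
# through the text so earlier character offsets stay valid.
anonymized = text
for ent in sorted(entities, key=lambda e: e["start"], reverse=True):
    anonymized = (
        anonymized[: ent["start"]] + "[" + ent["entity_group"] + "]" + anonymized[ent["end"] :]
    )

print(anonymized)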