lewtun committed
Commit ff4d035 · 1 Parent(s): 0e74f9c

add config

Files changed (1)
  1. config.json +9 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "BSC-TeMU/roberta-base-bne",
+  "_name_or_path": "lewtun/roberta-base-bne-finetuned-amazon_reviews_multi",
   "architectures": [
     "RobertaForSequenceClassification"
   ],
@@ -10,8 +10,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 768,
+  "id2label": {
+    "0": "NEGATIVO",
+    "1": "POSITIVO"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "NEGATIVO": 0,
+    "POSITIVO": 1
+  },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",