Update README.md
Browse files
README.md
CHANGED
@@ -46,11 +46,10 @@ We have fine-tuned all pre-trained models on 3 legal tasks with Indian datasets:
 ### Citation
 ```
 @inproceedings{paul-2022-pretraining,
-  doi = {10.48550/ARXIV.2209.06049},
   url = {https://arxiv.org/abs/2209.06049},
   author = {Paul, Shounak and Mandal, Arpan and Goyal, Pawan and Ghosh, Saptarshi},
   title = {Pre-trained Language Models for the Legal Domain: A Case Study on Indian Law},
-  booktitle = {Proceedings of ICAIL 2023}
+  booktitle = {Proceedings of 19th International Conference on Artificial Intelligence and Law - ICAIL 2023}
   year = {2023},
 }
 ```
|
|
|
### Citation
```
@inproceedings{paul-2022-pretraining,
  url = {https://arxiv.org/abs/2209.06049},
  author = {Paul, Shounak and Mandal, Arpan and Goyal, Pawan and Ghosh, Saptarshi},
  title = {Pre-trained Language Models for the Legal Domain: A Case Study on Indian Law},
  booktitle = {Proceedings of 19th International Conference on Artificial Intelligence and Law - ICAIL 2023}
  year = {2023},
}
```