From a3ca97b48ed8e23059bda1372ea2733f42809947 Mon Sep 17 00:00:00 2001 From: nayef211 Date: Fri, 7 Jan 2022 18:32:21 -0800 Subject: [PATCH] Updated XLMR docs --- torchtext/models/roberta/bundler.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/torchtext/models/roberta/bundler.py b/torchtext/models/roberta/bundler.py index fcfb82dbd6..034a7f8646 100644 --- a/torchtext/models/roberta/bundler.py +++ b/torchtext/models/roberta/bundler.py @@ -168,6 +168,15 @@ def encoderConf(self) -> RobertaEncoderConf: ''' XLM-R Encoder with Base configuration + The XLM-RoBERTa model was proposed in `Unsupervised Cross-lingual Representation Learning + at Scale <https://arxiv.org/abs/1911.02116>`_. It is a large multi-lingual language model, + trained on 2.5TB of filtered CommonCrawl data and based on the RoBERTa model architecture. + + Originally published by the authors of XLM-RoBERTa under MIT License + and redistributed with the same license. + [`License <https://github.com/pytorch/fairseq/blob/main/LICENSE>`__, + `Source <https://github.com/pytorch/fairseq/tree/main/examples/xlmr#pre-trained-models>`__] + Please refer to :func:`torchtext.models.RobertaModelBundle` for the usage. ''' ) @@ -189,6 +198,15 @@ def encoderConf(self) -> RobertaEncoderConf: ''' XLM-R Encoder with Large configuration + The XLM-RoBERTa model was proposed in `Unsupervised Cross-lingual Representation Learning + at Scale <https://arxiv.org/abs/1911.02116>`_. It is a large multi-lingual language model, + trained on 2.5TB of filtered CommonCrawl data and based on the RoBERTa model architecture. + + Originally published by the authors of XLM-RoBERTa under MIT License + and redistributed with the same license. + [`License <https://github.com/pytorch/fairseq/blob/main/LICENSE>`__, + `Source <https://github.com/pytorch/fairseq/tree/main/examples/xlmr#pre-trained-models>`__] + Please refer to :func:`torchtext.models.RobertaModelBundle` for the usage. ''' )