<!--Copyright 2022 The HuggingFace Team. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
-->

# Multilingual models for inference[[multilingual-models-for-inference]]

[[open-in-colab]]

There are several multilingual models in 🤗 Transformers, and their inference usage differs from monolingual models.
That said, not *all* multilingual models are used differently.

Some models, like [bert-base-multilingual-uncased](https://huggingface.co/bert-base-multilingual-uncased), can be used just like a monolingual model, as the short sketch below shows.
This guide will show you how to use multilingual models whose usage differs for inference.
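
For instance, a minimal fill-mask sketch with that checkpoint, using the standard pipeline API with no multilingual-specific setup (the example sentence is illustrative):

```py
>>> from transformers import pipeline

>>> # used exactly like any monolingual BERT checkpoint
>>> fill_mask = pipeline("fill-mask", model="bert-base-multilingual-uncased")
>>> predictions = fill_mask("Paris is the [MASK] of France.")
```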

## XLM[[xlm]]

XLM has ten different checkpoints, only one of which is monolingual.
The nine remaining checkpoints can be split into two categories: the checkpoints that use language embeddings and those that don't.

### XLM with language embeddings[[xlm-with-language-embeddings]]

The following XLM models use language embeddings to specify the language used at inference:

- `xlm-mlm-ende-1024` (Masked language modeling, English-German)
- `xlm-mlm-enfr-1024` (Masked language modeling, English-French)
- `xlm-mlm-enro-1024` (Masked language modeling, English-Romanian)
- `xlm-mlm-xnli15-1024` (Masked language modeling, the 15 XNLI languages)
- `xlm-mlm-tlm-xnli15-1024` (Masked language modeling + translation, the 15 XNLI languages)
- `xlm-clm-enfr-1024` (Causal language modeling, English-French)
- `xlm-clm-ende-1024` (Causal language modeling, English-German)

Language embeddings are represented as a tensor of the same shape as the `input_ids` passed to the model.
The values in these tensors depend on the language used and are identified by the tokenizer's `lang2id` and `id2lang` attributes.

In this example, load the `xlm-clm-enfr-1024` checkpoint (Causal language modeling, English-French):

```py
>>> import torch
>>> from transformers import XLMTokenizer, XLMWithLMHeadModel

>>> tokenizer = XLMTokenizer.from_pretrained("xlm-clm-enfr-1024")
>>> model = XLMWithLMHeadModel.from_pretrained("xlm-clm-enfr-1024")
```

The `lang2id` attribute of the tokenizer displays this model's languages and their ids:

```py
>>> print(tokenizer.lang2id)
{'en': 0, 'fr': 1}
```

Next, create an example input:

```py
>>> input_ids = torch.tensor([tokenizer.encode("Wikipedia was used to")])  # batch size of 1
```

Set the language id as `"en"` and use it to define the language embedding.
The language embedding is a tensor filled with `0` since that is the language id for English.
This tensor should be the same size as `input_ids`.

```py
>>> language_id = tokenizer.lang2id["en"]  # 0
>>> langs = torch.tensor([language_id] * input_ids.shape[1])  # torch.tensor([0, 0, 0, ..., 0])

>>> # reshape it to size (batch_size, sequence_length)
>>> langs = langs.view(1, -1)  # is now of shape [1, sequence_length] (batch size of 1)
```

Now pass the `input_ids` and language embedding to the model:

```py
>>> outputs = model(input_ids, langs=langs)
```
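
As a quick sanity check, you could greedily decode the most likely next token from the returned logits; this inspection step is an illustrative addition, not part of the original walkthrough:

```py
>>> # logits over the vocabulary at the last position in the sequence
>>> next_token_logits = outputs.logits[0, -1]
>>> next_token_id = int(next_token_logits.argmax())
>>> print(tokenizer.decode([next_token_id]))
```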

The [run_generation.py](https://github.com/huggingface/transformers/tree/main/examples/pytorch/text-generation/run_generation.py) script can generate text with language embeddings using the `xlm-clm` checkpoints.

### XLM without language embeddings[[xlm-without-language-embeddings]]

The following XLM models do not require language embeddings during inference:

- `xlm-mlm-17-1280` (Masked language modeling, 17 languages)
- `xlm-mlm-100-1280` (Masked language modeling, 100 languages)

Unlike the previous XLM checkpoints, these models are used for generic sentence representations.
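
For instance, one way to get a sentence representation from these checkpoints is to mean-pool the final hidden states; both the pooling strategy and the example sentence below are illustrative choices, not prescribed by the model:

```py
>>> import torch
>>> from transformers import AutoTokenizer, AutoModel

>>> tokenizer = AutoTokenizer.from_pretrained("xlm-mlm-17-1280")
>>> model = AutoModel.from_pretrained("xlm-mlm-17-1280")

>>> # no langs tensor is required for these checkpoints
>>> inputs = tokenizer("Wikipedia was used to", return_tensors="pt")
>>> with torch.no_grad():
...     outputs = model(**inputs)

>>> # mean-pool the token states into a single sentence vector
>>> sentence_embedding = outputs.last_hidden_state.mean(dim=1)
```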

## BERT[[bert]]

The following BERT models can be used for multilingual tasks:

- `bert-base-multilingual-uncased` (Masked language modeling + Next sentence prediction, 102 languages)
- `bert-base-multilingual-cased` (Masked language modeling + Next sentence prediction, 104 languages)

These models do not require language embeddings during inference.
They identify the language from the context and infer accordingly.
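
To see this in action, you could feed masked sentences in different languages to a single checkpoint; the sentences below are illustrative inputs, not from the original guide:

```py
>>> from transformers import pipeline

>>> fill_mask = pipeline("fill-mask", model="bert-base-multilingual-cased")

>>> # no language ids anywhere; the model picks up the language from the text itself
>>> french_preds = fill_mask("Paris est la [MASK] de la France.")
>>> german_preds = fill_mask("Paris ist die [MASK] von Frankreich.")
```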

## XLM-RoBERTa[[xlmroberta]]

The following XLM-RoBERTa models can be used for multilingual tasks:

- `xlm-roberta-base` (Masked language modeling, 100 languages)
- `xlm-roberta-large` (Masked language modeling, 100 languages)

XLM-RoBERTa was trained on 2.5TB of newly created and cleaned CommonCrawl data in 100 languages.
It provides strong gains over previously released multilingual models like mBERT or XLM on downstream tasks like classification, sequence labeling, and question answering.
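
Inference again needs no language ids; here is a minimal masked-language-modeling sketch (the German sentence is an arbitrary illustration), noting that XLM-RoBERTa uses `<mask>` rather than `[MASK]`:

```py
>>> import torch
>>> from transformers import AutoTokenizer, AutoModelForMaskedLM

>>> tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
>>> model = AutoModelForMaskedLM.from_pretrained("xlm-roberta-base")

>>> inputs = tokenizer("Das ist ein <mask> Beispiel.", return_tensors="pt")
>>> with torch.no_grad():
...     logits = model(**inputs).logits

>>> # locate the masked position and take the most likely token there
>>> mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
>>> predicted_id = logits[0, mask_index].argmax(dim=-1)
>>> print(tokenizer.decode(predicted_id))
```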

## M2M100[[m2m100]]

The following M2M100 models can be used for multilingual translation:

- `facebook/m2m100_418M` (Translation)
- `facebook/m2m100_1.2B` (Translation)

In this example, load the `facebook/m2m100_418M` checkpoint to translate from Chinese to English.
You can set the source language in the tokenizer:

```py
>>> from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

>>> en_text = "Do not meddle in the affairs of wizards, for they are subtle and quick to anger."
>>> chinese_text = "不要插手巫師的事務, 因為他們是微妙的, 很快就會發怒."

>>> tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M", src_lang="zh")
>>> model = M2M100ForConditionalGeneration.from_pretrained("facebook/m2m100_418M")
```

Tokenize the text:

```py
>>> encoded_zh = tokenizer(chinese_text, return_tensors="pt")
```

M2M100 forces the target language id as the first generated token in order to translate into the target language.
Set `forced_bos_token_id` to `en` in the `generate` method to translate to English:

```py
>>> generated_tokens = model.generate(**encoded_zh, forced_bos_token_id=tokenizer.get_lang_id("en"))
>>> tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
['Do not interfere with the matters of the witches, because they are delicate and will soon be angry.']
```
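
Because this checkpoint is many-to-many, the same encoded input can be decoded into any other supported language by swapping the forced target id; French is used below purely as an illustration:

```py
>>> generated_tokens = model.generate(**encoded_zh, forced_bos_token_id=tokenizer.get_lang_id("fr"))
>>> french_translation = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
```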

## MBart[[mbart]]

The following MBart models can be used for multilingual translation:

- `facebook/mbart-large-50-one-to-many-mmt` (One-to-many multilingual machine translation, 50 languages)
- `facebook/mbart-large-50-many-to-many-mmt` (Many-to-many multilingual machine translation, 50 languages)
- `facebook/mbart-large-50-many-to-one-mmt` (Many-to-one multilingual machine translation, 50 languages)
- `facebook/mbart-large-50` (Multilingual translation, 50 languages)
- `facebook/mbart-large-cc25`

In this example, load the `facebook/mbart-large-50-many-to-many-mmt` checkpoint to translate Finnish to English.
You can set the source language in the tokenizer:

```py
>>> from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

>>> en_text = "Do not meddle in the affairs of wizards, for they are subtle and quick to anger."
>>> fi_text = "Älä sekaannu velhojen asioihin, sillä ne ovat hienovaraisia ja nopeasti vihaisia."

>>> tokenizer = AutoTokenizer.from_pretrained("facebook/mbart-large-50-many-to-many-mmt", src_lang="fi_FI")
>>> model = AutoModelForSeq2SeqLM.from_pretrained("facebook/mbart-large-50-many-to-many-mmt")
```

Tokenize the Finnish source text:

```py
>>> encoded_fi = tokenizer(fi_text, return_tensors="pt")
```

MBart forces the target language id as the first generated token in order to translate into the target language.
Set `forced_bos_token_id` to `en` in the `generate` method to translate to English:

```py
>>> generated_tokens = model.generate(**encoded_fi, forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"])
>>> tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
["Don't interfere with the wizard's affairs, because they are subtle, will soon get angry."]
```

If you are using the `facebook/mbart-large-50-many-to-one-mmt` checkpoint, you don't need to force the target language id as the first generated token; otherwise, the usage is the same.
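
For example, a minimal many-to-one sketch (reusing `fi_text` from above) skips `forced_bos_token_id` entirely, since this checkpoint always translates into English:

```py
>>> tokenizer = AutoTokenizer.from_pretrained("facebook/mbart-large-50-many-to-one-mmt", src_lang="fi_FI")
>>> model = AutoModelForSeq2SeqLM.from_pretrained("facebook/mbart-large-50-many-to-one-mmt")

>>> encoded_fi = tokenizer(fi_text, return_tensors="pt")
>>> generated_tokens = model.generate(**encoded_fi)  # no forced target id needed
>>> english_translation = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
```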