{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This notebook regroups the code sample of the video below, which is a part of the [Hugging Face course](https://huggingface.co/course)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form"
},
"outputs": [
{
"data": {
"text/html": [
"<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/AhChOFRegn4?rel=0&controls=0&showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"#@title\n",
"from IPython.display import HTML\n",
"\n",
"HTML('<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/AhChOFRegn4?rel=0&controls=0&showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Install the Transformers and Datasets libraries to run this notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"! pip install datasets transformers[sentencepiece]"
]
},
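{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `AutoModel` class instantiates the correct architecture for any checkpoint: it reads the checkpoint's configuration to pick the model class, so the same code works for BERT, GPT-2, BART, and more."
]
},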
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Some weights of the model checkpoint at bert-base-cased were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.seq_relationship.weight', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.weight', 'cls.predictions.bias', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.bias']\n",
"- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
"- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.bert.modeling_bert.BertModel'>\n",
"<class 'transformers.models.gpt2.modeling_gpt2.GPT2Model'>\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Some weights of the model checkpoint at facebook/bart-base were not used when initializing BartModel: ['final_logits_bias']\n",
"- This IS expected if you are initializing BartModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
"- This IS NOT expected if you are initializing BartModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.bart.modeling_bart.BartModel'>\n"
]
}
],
"source": [
"from transformers import AutoModel\n",
"\n",
"bert_model = AutoModel.from_pretrained(\"bert-base-cased\")\n",
"print(type(bert_model))\n",
"\n",
"gpt_model = AutoModel.from_pretrained(\"gpt2\")\n",
"print(type(gpt_model))\n",
"\n",
"bart_model = AutoModel.from_pretrained(\"facebook/bart-base\")\n",
"print(type(bart_model))"
]
},
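{
"cell_type": "markdown",
"metadata": {},
"source": [
"The warnings above are expected: the `bert-base-cased` checkpoint includes pretraining head weights that the bare `BertModel` does not use. As a quick sketch (not shown in the video), loading the checkpoint into `BertForPreTraining`, which keeps both pretraining heads, should use those weights as well:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertForPreTraining\n",
"\n",
"# Loading into the architecture the checkpoint was pretrained with keeps\n",
"# the head weights instead of discarding them.\n",
"pretraining_model = BertForPreTraining.from_pretrained(\"bert-base-cased\")\n",
"print(type(pretraining_model))"
]
},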
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.bert.configuration_bert.BertConfig'>\n",
"<class 'transformers.models.gpt2.configuration_gpt2.GPT2Config'>\n",
"<class 'transformers.models.bart.configuration_bart.BartConfig'>\n"
]
}
],
"source": [
"from transformers import AutoConfig\n",
"\n",
"bert_config = AutoConfig.from_pretrained(\"bert-base-cased\")\n",
"print(type(bert_config))\n",
"\n",
"gpt_config = AutoConfig.from_pretrained(\"gpt2\")\n",
"print(type(gpt_config))\n",
"\n",
"bart_config = AutoConfig.from_pretrained(\"facebook/bart-base\")\n",
"print(type(bart_config))"
]
},
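{
"cell_type": "markdown",
"metadata": {},
"source": [
"A configuration object holds the architecture hyperparameters of its model (number of layers, hidden size, vocabulary size, and so on), exposed as attributes. A small illustration with standard fields of these config classes:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# BERT and GPT-2 name their size hyperparameters differently.\n",
"print(bert_config.hidden_size, bert_config.num_hidden_layers)\n",
"print(gpt_config.n_embd, gpt_config.n_layer)"
]
},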
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.bert.configuration_bert.BertConfig'>\n"
]
}
],
"source": [
"from transformers import BertConfig\n",
"\n",
"bert_config = BertConfig.from_pretrained(\"bert-base-cased\")\n",
"print(type(bert_config))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.gpt2.configuration_gpt2.GPT2Config'>\n"
]
}
],
"source": [
"from transformers import GPT2Config\n",
"\n",
"gpt_config = GPT2Config.from_pretrained(\"gpt2\")\n",
"print(type(gpt_config))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'transformers.models.bart.configuration_bart.BartConfig'>\n"
]
}
],
"source": [
"from transformers import BartConfig\n",
"\n",
"bart_config = BartConfig.from_pretrained(\"facebook/bart-base\")\n",
"print(type(bart_config))"
]
},
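{
"cell_type": "markdown",
"metadata": {},
"source": [
"Unlike the `Auto*` classes, these model-specific classes are tied to a single architecture: `BertConfig` will only ever produce a BERT configuration, so reach for them when you know exactly which model you want."
]
},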
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"BertConfig {\n",
" \"architectures\": [\n",
" \"BertForMaskedLM\"\n",
" ],\n",
" \"attention_probs_dropout_prob\": 0.1,\n",
" \"gradient_checkpointing\": false,\n",
" \"hidden_act\": \"gelu\",\n",
" \"hidden_dropout_prob\": 0.1,\n",
" \"hidden_size\": 768,\n",
" \"initializer_range\": 0.02,\n",
" \"intermediate_size\": 3072,\n",
" \"layer_norm_eps\": 1e-12,\n",
" \"max_position_embeddings\": 512,\n",
" \"model_type\": \"bert\",\n",
" \"num_attention_heads\": 12,\n",
" \"num_hidden_layers\": 12,\n",
" \"pad_token_id\": 0,\n",
" \"position_embedding_type\": \"absolute\",\n",
" \"transformers_version\": \"4.7.0.dev0\",\n",
" \"type_vocab_size\": 2,\n",
" \"use_cache\": true,\n",
" \"vocab_size\": 28996\n",
"}\n",
"\n"
]
}
],
"source": [
"from transformers import BertConfig\n",
"\n",
"bert_config = BertConfig.from_pretrained(\"bert-base-cased\")\n",
"print(bert_config)"
]
},
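{
"cell_type": "markdown",
"metadata": {},
"source": [
"Any of these fields can be overridden by passing keyword arguments to `from_pretrained`; the sketch below (the 10-layer value is just an example) keeps the rest of the `bert-base-cased` configuration intact:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertConfig\n",
"\n",
"# Keyword arguments override the values stored in the checkpoint's config.\n",
"bert_config = BertConfig.from_pretrained(\"bert-base-cased\", num_hidden_layers=10)\n",
"print(bert_config.num_hidden_layers)"
]
},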
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertConfig, BertModel\n",
"\n",
"bert_config = BertConfig.from_pretrained(\"bert-base-cased\")\n",
"bert_model = BertModel(bert_config)"
]
},
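{
"cell_type": "markdown",
"metadata": {},
"source": [
"A model built from a configuration like this has the right architecture but randomly initialized weights. To get the pretrained weights instead, load the model class directly from the checkpoint:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertModel\n",
"\n",
"# from_pretrained fetches both the configuration and the trained weights.\n",
"bert_model = BertModel.from_pretrained(\"bert-base-cased\")"
]
},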
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertConfig, BertModel\n",
"\n",
"bert_config = BertConfig.from_pretrained(\"bert-base-cased\")\n",
"bert_model = BertModel(bert_config)\n",
"\n",
"# Training code\n",
"\n",
"bert_model.save_pretrained(\"my_bert_model\")"
]
},
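{
"cell_type": "markdown",
"metadata": {},
"source": [
"`save_pretrained` writes the configuration and the weights to the given folder, so the model can be reloaded later by pointing `from_pretrained` at that same path:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import BertModel\n",
"\n",
"# Reload the model saved above from its local folder.\n",
"bert_model = BertModel.from_pretrained(\"my_bert_model\")"
]
}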
],
"metadata": {
"colab": {
"name": "Instantiate a Transformers model (PyTorch)",
"provenance": []
}
},
"nbformat": 4,
"nbformat_minor": 4
}