28 changes: 28 additions & 0 deletions keras_nlp/models/albert/albert_presets.py
@@ -34,6 +34,13 @@
"Base size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Base size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": 11683584
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/albert_base_en_uncased/v1/model.h5",
"weights_hash": "b83ccf3418dd84adc569324183176813",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/albert_base_en_uncased/v1/vocab.spm",
@@ -58,6 +65,13 @@
"Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": 17683968
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/albert_large_en_uncased/v1/model.h5",
"weights_hash": "c7754804efb245f06dd6e7ced32e082c",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/albert_large_en_uncased/v1/vocab.spm",
@@ -82,6 +96,13 @@
"Extra Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Extra Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": 58724864
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/albert_extra_large_en_uncased/v1/model.h5",
"weights_hash": "713209be8aadfa614fd79f18c9aeb16d",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/albert_extra_large_en_uncased/v1/vocab.spm",
@@ -106,6 +127,13 @@
"Extra Extra Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Extra Extra Large size of ALBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": 222595584
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/albert_extra_extra_large_en_uncased/v1/model.h5",
"weights_hash": "a835177b692fb6a82139f94c66db2f22",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/albert_extra_extra_large_en_uncased/v1/vocab.spm",
77 changes: 77 additions & 0 deletions keras_nlp/models/bert/bert_presets.py
@@ -34,6 +34,14 @@
"Tiny size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Tiny size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "4M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_tiny_en_uncased/v1/model.h5",
"weights_hash": "c2b29fcbf8f814a0812e4ab89ef5c068",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_tiny_en_uncased/v1/vocab.txt",
@@ -57,6 +65,14 @@
"Small size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Small size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "29M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_small_en_uncased/v1/model.h5",
"weights_hash": "08632c9479b034f342ba2c2b7afba5f7",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_small_en_uncased/v1/vocab.txt",
@@ -80,6 +96,14 @@
"Medium size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Medium size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "41M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_medium_en_uncased/v1/model.h5",
"weights_hash": "bb990e1184ec6b6185450c73833cd661",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_medium_en_uncased/v1/vocab.txt",
@@ -103,6 +127,14 @@
"Base size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Base size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "109M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_en_uncased/v1/model.h5",
"weights_hash": "9b2b2139f221988759ac9cdd17050b31",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_en_uncased/v1/vocab.txt",
@@ -126,6 +158,14 @@
"Base size of BERT where case is maintained. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Base size of BERT where case is maintained. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "108M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_en/v1/model.h5",
"weights_hash": "f94a6cb012e18f4fb8ec92abb91864e9",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_en/v1/vocab.txt",
@@ -146,6 +186,11 @@
"lowercase": False,
},
"description": ("Base size of BERT. Trained on Chinese Wikipedia."),
"metadata": {
"description": ("Base size of BERT. Trained on Chinese Wikipedia."),
"params": "102M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_zh/v1/model.h5",
"weights_hash": "79afa421e386076e62ab42dad555ab0c",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_zh/v1/vocab.txt",
@@ -169,6 +214,14 @@
"Base size of BERT. Trained on Wikipedias of 104 "
"languages."
),
"metadata": {
"description": (
"Base size of BERT. Trained on Wikipedias of 104 "
"languages."
),
"params": "178M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_multi/v1/model.h5",
"weights_hash": "b0631cec0a1f2513c6cfd75ba29c33aa",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_base_multi/v1/vocab.txt",
@@ -192,6 +245,14 @@
"Large size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Large size of BERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "335M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_large_en_uncased/v1/model.h5",
"weights_hash": "cc5cacc9565ef400ee4376105f40ddae",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_large_en_uncased/v1/vocab.txt",
@@ -215,6 +276,14 @@
"Base size of BERT where case is maintained. "
"Trained on English Wikipedia + BooksCorpus."
),
"metadata": {
"description": (
"Base size of BERT where case is maintained. "
"Trained on English Wikipedia + BooksCorpus."
),
"params": "334M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_large_en/v1/model.h5",
"weights_hash": "8b8ab82290bbf4f8db87d4f100648890",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_large_en/v1/vocab.txt",
@@ -247,6 +316,14 @@
"description": (
"bert_tiny_en_uncased backbone fine-tuned on the glue/sst2 dataset."
),
"metadata": {
"description": (
"bert_tiny_en_uncased backbone fine-tuned on the glue/sst2 dataset."
),
"params": "4M",
"models": "[BERT](bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/bert_tiny_en_uncased_sst2/v1/model.h5",
"weights_hash": "1f9c2d59f9e229e08f3fbd44239cfb0b",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/bert_tiny_en_uncased_sst2/v1/vocab.txt",
35 changes: 35 additions & 0 deletions keras_nlp/models/deberta_v3/deberta_v3_presets.py
@@ -30,6 +30,13 @@
"Extra small size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"metadata": {
"description": (
"Extra small size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"params": "71M"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_extra_small_en/v1/model.h5",
"weights_hash": "d8e10327107e5c5e20b45548a5028619",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_extra_small_en/v1/vocab.spm",
@@ -51,6 +58,13 @@
"Small size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"metadata": {
"description": (
"Small size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"params": "142M"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_small_en/v1/model.h5",
"weights_hash": "84118eb7c5a735f2061ecccaf71bb888",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_small_en/v1/vocab.spm",
@@ -72,6 +86,13 @@
"Base size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"metadata": {
"description": (
"Base size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"params": "184M"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_base_en/v1/model.h5",
"weights_hash": "cebce044aeed36aec9b94e3b8a255430",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_base_en/v1/vocab.spm",
@@ -93,6 +114,13 @@
"Large size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"metadata": {
"description": (
"Large size of DeBERTaV3. "
"Trained on English Wikipedia, BookCorpus and OpenWebText."
),
"params": "435M"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_large_en/v1/model.h5",
"weights_hash": "bce7690f358a9e39304f8c0ebc71a745",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_large_en/v1/vocab.spm",
@@ -114,6 +142,13 @@
"Base size of DeBERTaV3. "
"Trained on the 2.5TB multilingual CC100 dataset."
),
"metadata": {
"description": (
"Base size of DeBERTaV3. "
"Trained on the 2.5TB multilingual CC100 dataset."
),
"params": "279M"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_base_multi/v1/model.h5",
"weights_hash": "26e5a824b26afd2ee336835bd337bbeb",
"spm_proto_url": "https://storage.googleapis.com/keras-nlp/models/deberta_v3_base_multi/v1/vocab.spm",
26 changes: 26 additions & 0 deletions keras_nlp/models/distil_bert/distil_bert_presets.py
@@ -32,6 +32,15 @@
"Trained on English Wikipedia + BooksCorpus using BERT as the "
"teacher model."
),
"metadata": {
"description": (
"Base size of DistilBERT where all input is lowercased. "
"Trained on English Wikipedia + BooksCorpus using BERT as the "
"teacher model."
),
"params": "67M",
"models": "[DistilBert](distil_bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_en_uncased/v1/model.h5",
"weights_hash": "6625a649572e74086d74c46b8d0b0da3",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_en_uncased/v1/vocab.txt",
@@ -55,6 +64,15 @@
"Trained on English Wikipedia + BooksCorpus using BERT as the "
"teacher model."
),
"metadata": {
"description": (
"Base size of DistilBERT where case is maintained. "
"Trained on English Wikipedia + BooksCorpus using BERT as the "
"teacher model."
),
"params": "63M",
"models": "[DistilBert](distil_bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_en/v1/model.h5",
"weights_hash": "fa36aa6865978efbf85a5c8264e5eb57",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_en/v1/vocab.txt",
@@ -77,6 +95,14 @@
"Base size of DistilBERT. Trained on Wikipedias of 104 languages "
"using BERT as the teacher model."
),
"metadata": {
"description": (
"Base size of DistilBERT. Trained on Wikipedias of 104 languages "
"using BERT as the teacher model."
),
"params": "135M",
"models": "[DistilBert](distil_bert)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_multi/v1/model.h5",
"weights_hash": "c0f11095e2a6455bd3b1a6d14800a7fa",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/distil_bert_base_multi/v1/vocab.txt",
16 changes: 16 additions & 0 deletions keras_nlp/models/roberta/roberta_presets.py
@@ -29,6 +29,14 @@
"Base size of RoBERTa where case is maintained. "
"Trained on a 160 GB English dataset."
),
"metadata": {
"description": (
"Base size of RoBERTa where case is maintained. "
"Trained on a 160 GB English dataset."
),
"params": "125M",
"models": "[RoBERTa](roberta)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/roberta_base_en/v1/model.h5",
"weights_hash": "958eede1c7edaa9308e027be18fde7a8",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/roberta_base_en/v1/vocab.json",
@@ -51,6 +59,14 @@
"Large size of RoBERTa where case is maintained. "
"Trained on a 160 GB English dataset."
),
"metadata": {
"description": (
"Large size of RoBERTa where case is maintained. "
"Trained on a 160 GB English dataset."
),
"params": "355M",
"models": "[RoBERTa](roberta)"
},
"weights_url": "https://storage.googleapis.com/keras-nlp/models/roberta_large_en/v1/model.h5",
"weights_hash": "1978b864c317a697fe62a894d3664f14",
"vocabulary_url": "https://storage.googleapis.com/keras-nlp/models/roberta_large_en/v1/vocab.json",
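For reference, a minimal sketch of how the new "metadata" entries could be read once this lands. It assumes keras_nlp is installed and that bert_presets.py keeps its module-level backbone_presets dict; the snippet is illustrative only and is not part of this change.

# Illustrative sketch, not part of this PR. Assumes keras_nlp is installed and
# that bert_presets.py exposes a module-level dict named `backbone_presets`.
from keras_nlp.models.bert.bert_presets import backbone_presets

for name, preset in backbone_presets.items():
    meta = preset.get("metadata", {})
    # Each preset now carries a human-readable description plus a rough
    # parameter count (stored as a string like "109M" in this file).
    print(f"{name}: {meta.get('params')} params - {meta.get('description')}")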