author     Daniël de Kok <me@danieldk.eu>  2020-10-23 08:32:06 +0200
committer  Jonathan Ringer <jonringer@users.noreply.github.com>  2020-10-28 10:41:27 -0700
commit     05f6de94cf6c918ca1b573da2cb3dcc79851b64e
tree       b1e276e990e243e1cd9dc35fb4caaa18da8a6e48 /pkgs/development/python-modules/transformers
parent     d2e918cc12592a613a9c137adad3e47b6d3ac0ad
python3Packages.transformers: 3.3.1 -> 3.4.0
Changelog: https://github.com/huggingface/transformers/releases/tag/v3.4.0
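For anyone on an older nixpkgs who wants to try 3.4.0 before this bump reaches a channel, the same change can be approximated with an override. The following is a minimal sketch and not part of this commit: the rev and sha256 are copied from the diff below, the protobuf line mirrors the new propagatedBuildInputs entry, and everything else (the <nixpkgs> path, overridePythonAttrs) is ordinary nixpkgs machinery rather than anything introduced here.

# Hypothetical override sketch, not part of this commit.
let
  pkgs = import <nixpkgs> { };  # an older nixpkgs that still carries 3.3.1
  inherit (pkgs) python3Packages fetchFromGitHub;
in
python3Packages.transformers.overridePythonAttrs (old: {
  version = "3.4.0";
  src = fetchFromGitHub {
    owner = "huggingface";
    repo = "transformers";
    rev = "v3.4.0";
    sha256 = "1v09gryxsg57d2cjwagna1535m8mbxlazdbhsww210lxa818m5qj";
  };
  # Mirror the protobuf dependency added in this commit; boto3 is simply left in place here.
  propagatedBuildInputs = old.propagatedBuildInputs ++ [ python3Packages.protobuf ];
})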
Diffstat (limited to 'pkgs/development/python-modules/transformers')
-rw-r--r--  pkgs/development/python-modules/transformers/default.nix  18
1 file changed, 8 insertions(+), 10 deletions(-)
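Since the only file touched is the package expression itself, the bump can be sanity-checked by building the changed attribute straight from a nixpkgs checkout that contains this commit. A minimal sketch follows; the file name build-check.nix is hypothetical, python3Packages.transformers is the attribute named in the commit message, and building it also exercises the checkPhase with the disabledTests list shown in the diff below.

# build-check.nix -- hypothetical helper, not part of this commit.
# Run `nix-build build-check.nix` from the root of a nixpkgs checkout
# containing this change; it simply selects the updated attribute so the
# 3.4.0 derivation (tests included) gets built.
let
  pkgs = import ./. { };  # ./. = the nixpkgs checkout under review
in
pkgs.python3Packages.transformers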
diff --git a/pkgs/development/python-modules/transformers/default.nix b/pkgs/development/python-modules/transformers/default.nix
index fdb807bd0a1e..e3fd150fbcf7 100644
--- a/pkgs/development/python-modules/transformers/default.nix
+++ b/pkgs/development/python-modules/transformers/default.nix
@@ -7,6 +7,7 @@
 , requests
 , numpy
 , parameterized
+, protobuf
 , sacremoses
 , sentencepiece
 , timeout-decorator
@@ -17,19 +18,19 @@
 buildPythonPackage rec {
   pname = "transformers";
-  version = "3.3.1";
+  version = "3.4.0";
   src = fetchFromGitHub {
     owner = "huggingface";
     repo = pname;
     rev = "v${version}";
-    sha256 = "1j9nzhl0zw5z9rnvzfih7v6bax353rxp05b3f0cvkii3b5dbkc2j";
+    sha256 = "1v09gryxsg57d2cjwagna1535m8mbxlazdbhsww210lxa818m5qj";
   };
   propagatedBuildInputs = [
-    boto3
     filelock
     numpy
+    protobuf
     regex
     requests
     sacremoses
@@ -44,11 +45,6 @@ buildPythonPackage rec {
     timeout-decorator
   ];
-  postPatch = ''
-    substituteInPlace setup.py \
-      --replace "tokenizers == 0.8.1.rc2" "tokenizers>=0.8"
-  '';
-
   preCheck = ''
     export HOME="$TMPDIR"
@@ -67,8 +63,10 @@ buildPythonPackage rec {
   # Disable tests that require network access.
   disabledTests = [
-    "PegasusTokenizationTest"
-    "T5TokenizationTest"
+    "BlenderbotSmallTokenizerTest"
+    "Blenderbot3BTokenizerTests"
+    "TokenizationTest"
+    "TestTokenizationBart"
     "test_all_tokenizers"
     "test_batch_encoding_is_fast"
     "test_batch_encoding_pickle"