#
# This file is autogenerated by pip-compile
# To update, run:
#
#    pip-compile
#
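# A possible regeneration workflow (a sketch, assuming pip-tools is installed
# and the project's setup.py is the dependency source, as the
# "sexism_custom_classifier (setup.py)" annotations below suggest):
#
#    pip install pip-tools
#    pip-compile setup.py --output-file requirements.txt
#
# To install the pinned set into the current environment:
#
#    pip install -r requirements.txt
#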
absl-py==0.11.0 # via tensorboard, tensorflow
appnope==0.1.0 # via ipykernel, ipython
argon2-cffi==20.1.0 # via notebook
astunparse==1.6.3 # via tensorflow
async-generator==1.10 # via nbclient
attrs==20.2.0 # via jsonschema
backcall==0.2.0 # via ipython
bleach==3.2.1 # via nbconvert
boto3==1.14.58 # via smart-open
boto==2.49.0 # via smart-open
botocore==1.17.58 # via boto3, s3transfer
cachetools==4.1.1 # via google-auth
certifi==2020.6.20 # via requests
cffi==1.14.3 # via argon2-cffi
chardet==3.0.4 # via requests
click==7.1.2 # via nltk, sacremoses
dataclasses==0.6 # via torch
decorator==4.4.2 # via ipython
defusedxml==0.6.0 # via nbconvert
docutils==0.15.2 # via botocore
entrypoints==0.3 # via nbconvert
filelock==3.0.12 # via transformers
future==0.18.2 # via torch
gast==0.3.3 # via tensorflow
gensim==3.8.3 # via pycontractions
google-auth-oauthlib==0.4.2 # via tensorboard
google-auth==1.23.0 # via google-auth-oauthlib, tensorboard
google-pasta==0.2.0 # via tensorflow
grpcio==1.33.2 # via tensorboard, tensorflow
h5py==2.10.0 # via tensorflow
idna==2.10 # via requests
ipykernel==5.3.4 # via ipywidgets, notebook
ipython-genutils==0.2.0 # via nbformat, notebook, traitlets
ipython==7.19.0 # via ipykernel, ipywidgets
ipywidgets==7.5.1 # via sexism_custom_classifier (setup.py)
jedi==0.17.2 # via ipython
jinja2==2.11.2 # via nbconvert, notebook
jmespath==0.10.0 # via boto3, botocore
joblib==0.16.0 # via nltk, sacremoses, scikit-learn
jsonschema==3.2.0 # via nbformat
jupyter-client==6.1.7 # via ipykernel, nbclient, notebook
jupyter-core==4.6.3 # via jupyter-client, nbconvert, nbformat, notebook
jupyterlab-pygments==0.1.2 # via nbconvert
keras-preprocessing==1.1.2 # via tensorflow
language-check==1.1 # via pycontractions
markdown==3.3.3 # via tensorboard
markupsafe==1.1.1 # via jinja2
mistune==0.8.4 # via nbconvert
nbclient==0.5.1 # via nbconvert
nbconvert==6.0.7 # via notebook
nbformat==5.0.8 # via ipywidgets, nbclient, nbconvert, notebook
nest-asyncio==1.4.2 # via nbclient
nltk==3.5 # via sexism_custom_classifier (setup.py)
notebook==6.1.4 # via widgetsnbextension
numpy==1.18.5 # via gensim, h5py, keras-preprocessing, opt-einsum, pandas, pyemd, scikit-learn, scipy, tensorboard, tensorflow, torch, transformers
oauthlib==3.1.0 # via requests-oauthlib
opt-einsum==3.3.0 # via tensorflow
packaging==20.4 # via bleach, transformers
pandas==1.1.2 # via sexism_custom_classifier (setup.py)
pandocfilters==1.4.3 # via nbconvert
parso==0.7.1 # via jedi
pexpect==4.8.0 # via ipython
pickleshare==0.7.5 # via ipython
prometheus-client==0.8.0 # via notebook
prompt-toolkit==3.0.8 # via ipython
protobuf==3.13.0 # via tensorboard, tensorflow, transformers
ptyprocess==0.6.0 # via pexpect, terminado
pyasn1-modules==0.2.8 # via google-auth
pyasn1==0.4.8 # via pyasn1-modules, rsa
pycontractions==2.0.1 # via sexism_custom_classifier (setup.py)
pycparser==2.20 # via cffi
pyemd==0.5.1 # via pycontractions
pygments==2.7.2 # via ipython, jupyterlab-pygments, nbconvert
pyparsing==2.4.7 # via packaging
pyrsistent==0.17.3 # via jsonschema
python-dateutil==2.8.1 # via botocore, jupyter-client, pandas
pytz==2020.1 # via pandas
pyzmq==19.0.2 # via jupyter-client, notebook
regex==2020.7.14 # via nltk, sacremoses, transformers
requests-oauthlib==1.3.0 # via google-auth-oauthlib
requests==2.24.0 # via requests-oauthlib, smart-open, tensorboard, transformers
rsa==4.6 # via google-auth
s3transfer==0.3.3 # via boto3
sacremoses==0.0.43 # via transformers
scikit-learn==0.23.2 # via sexism_custom_classifier (setup.py)
scipy==1.4.1 # via gensim, scikit-learn, tensorflow
send2trash==1.5.0 # via notebook
sentencepiece==0.1.94 # via transformers
six==1.15.0 # via absl-py, argon2-cffi, astunparse, bleach, gensim, google-auth, google-pasta, grpcio, h5py, jsonschema, keras-preprocessing, packaging, protobuf, python-dateutil, sacremoses, tensorboard, tensorflow
smart-open==2.1.1 # via gensim
tensorboard-plugin-wit==1.7.0 # via tensorboard
tensorboard==2.3.0 # via tensorflow
tensorflow-estimator==2.3.0 # via tensorflow
tensorflow==2.3.0 # via sexism_custom_classifier (setup.py)
termcolor==1.1.0 # via tensorflow
terminado==0.9.1 # via notebook
testpath==0.4.4 # via nbconvert
threadpoolctl==2.1.0 # via scikit-learn
tokenizers==0.9.2 # via transformers
torch==1.7.0 # via sexism_custom_classifier (setup.py)
tornado==6.1 # via ipykernel, jupyter-client, notebook, terminado
tqdm==4.48.2 # via nltk, sacremoses, transformers
traitlets==5.0.5 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbclient, nbconvert, nbformat, notebook
transformers==3.4.0 # via sexism_custom_classifier (setup.py)
tweet-preprocessor==0.6.0 # via sexism_custom_classifier (setup.py)
typing-extensions==3.7.4.3 # via torch
urllib3==1.25.10 # via botocore, requests
wcwidth==0.2.5 # via prompt-toolkit
webencodings==0.5.1 # via bleach
werkzeug==1.0.1 # via tensorboard
wheel==0.35.1 # via astunparse, tensorboard, tensorflow
widgetsnbextension==3.5.1 # via ipywidgets
wrapt==1.12.1 # via tensorflow
# The following packages are considered to be unsafe in a requirements file:
# setuptools
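#
# Note: pip-compile leaves setuptools unpinned by default; re-running
# pip-compile with the --allow-unsafe flag would pin it here as well.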