
Commit 22de005

first commit
0 parents  commit 22de005

12 files changed: +1574 -0 lines changed

KGs.zip

4.44 MB
Binary file not shown.

PretrainedModels.zip

34.1 MB
Binary file not shown.

README.md

+24
@@ -0,0 +1,24 @@
# Convolutional Complex Knowledge Graph Embeddings

This repository contains the implementation of our approach along with the experimental results needed for reproducibility.

## Installation

First clone the repository:
```
git clone https://github.com/XXX/XXX.git
```
Then obtain the required libraries:
```
conda env create -f environment.yml
source activate conex
```
The code is compatible with Python 3.6.4.

## Reproducing reported results
- Please unzip KGs.zip and PretrainedModels.zip (a quick sanity check for this step is sketched after this list)
- python reproduce_reported_results_FB15K-237.py
- python reproduce_reported_results_WN18RR.py
- python reproduce_reported_results_KINSHIP.py
- python reproduce_reported_results_UMLS.py
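
The sketch below is an illustrative sanity check only; it assumes the two archives unzip into KGs/ and PretrainedModels/ folders next to the reproduce scripts (folder names are an assumption, not stated in the commit):
```
import os

# Illustrative check: assumes KGs.zip and PretrainedModels.zip were
# unzipped into KGs/ and PretrainedModels/ in the repository root.
for folder in ('KGs', 'PretrainedModels'):
    if not os.path.isdir(folder):
        raise SystemExit('Please unzip %s.zip before running the reproduce scripts.' % folder)
```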

environment.yml

+169
@@ -0,0 +1,169 @@
name: conex
channels:
- dglteam
- pytorch
- anaconda
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1
- attrs=19.3.0
- backcall=0.1.0
- blas=1.0
- bleach=3.1.0
- bzip2=1.0.8
- ca-certificates=2019.11.28
- certifi=2019.11.28
- cffi=1.13.2
- cpuonly=1.0
- cycler=0.10.0
- dbus=1.13.6
- decorator=4.4.1
- defusedxml=0.6.0
- dgl=0.4.1
- entrypoints=0.3
- expat=2.2.5
- ffmpeg=4.2
- fontconfig=2.13.1
- freetype=2.9.1
- gettext=0.19.8.1
- glib=2.58.3
- gmp=6.1.2
- gnutls=3.6.5
- gst-plugins-base=1.14.5
- gstreamer=1.14.5
- icu=58.2
- importlib_metadata=1.1.0
- intel-openmp=2019.4
- ipykernel=5.1.3
- ipython=7.9.0
- ipython_genutils=0.2.0
- jedi=0.15.1
- jinja2=2.10.3
- joblib=0.13.2
- jpeg=9b
- json5=0.8.5
- jsonschema=3.2.0
- jupyter_client=5.3.4
- jupyter_core=4.6.1
- jupyterlab=1.2.3
- jupyterlab_server=1.0.6
- kiwisolver=1.1.0
- lame=3.100
- libedit=3.1.20181209
- libffi=3.2.1
- libgcc-ng=9.1.0
- libgfortran-ng=7.3.0
- libiconv=1.15
- libpng=1.6.37
- libsodium=1.0.16
- libstdcxx-ng=9.1.0
- libtiff=4.1.0
- libuuid=2.32.1
- libxcb=1.13
- libxml2=2.9.9
- markupsafe=1.1.1
- matplotlib=3.1.1
- mistune=0.8.4
- mkl=2019.4
- mkl-service=2.3.0
- mkl_fft=1.0.14
- mkl_random=1.1.0
- more-itertools=7.2.0
- nbconvert=5.6.1
- nbformat=4.4.0
- ncurses=6.1
- nettle=3.4.1
- networkx=2.4
- ninja=1.9.0
- notebook=6.0.1
- numpy=1.17.2
- numpy-base=1.17.2
- olefile=0.46
- openh264=1.8.0
- openssl=1.0.2t
- pandas=0.25.1
- pandoc=2.8.0.1
- pandocfilters=1.4.2
- parso=0.5.1
- pcre=8.43
- pexpect=4.7.0
- pickleshare=0.7.5
- pillow=6.2.1
- pip=19.2.3
- prometheus_client=0.7.1
- prompt_toolkit=2.0.10
- pthread-stubs=0.4
- ptyprocess=0.6.0
- pycparser=2.19
- pygments=2.4.2
- pyparsing=2.4.5
- pyqt=5.9.2
- pyrsistent=0.15.6
- python=3.6.4
- python-dateutil=2.8.0
- pytorch=1.3.1
- pytz=2019.3
- pyzmq=18.1.0
- qt=5.9.6
- readline=7.0
- scikit-learn=0.21.3
- scipy=1.3.1
- send2trash=1.5.0
- setuptools=41.4.0
- sip=4.19.8
- six=1.12.0
- sqlite=3.30.0
- terminado=0.8.3
- testpath=0.4.4
- tk=8.6.8
- torchvision=0.4.2
- tornado=6.0.3
- traitlets=4.3.3
- wcwidth=0.1.7
- webencodings=0.5.1
- wheel=0.33.6
- x264=1!152.20180806
- xorg-libxau=1.0.9
- xorg-libxdmcp=1.1.3
- xz=5.2.4
- zeromq=4.3.1
- zipp=0.6.0
- zlib=1.2.11
- zstd=1.3.7
- pip:
  - absl-py==0.8.1
  - astor==0.8.0
  - cachetools==3.1.1
  - chardet==3.0.4
  - click==7.0
  - cython==0.29.13
  - downhill==0.4.0
  - gast==0.2.2
  - google-auth==1.7.1
  - google-auth-oauthlib==0.4.1
  - google-pasta==0.1.8
  - grpcio==1.25.0
  - h5py==2.10.0
  - hdbscan==0.8.23
  - idna==2.8
  - keras-applications==1.0.8
  - keras-preprocessing==1.1.0
  - markdown==3.1.1
  - oauthlib==3.1.0
  - opt-einsum==3.1.0
  - protobuf==3.10.0
  - pyasn1==0.4.8
  - pyasn1-modules==0.2.7
  - requests==2.22.0
  - requests-oauthlib==1.3.0
  - rsa==4.0
  - tensorboard==2.0.1
  - tensorflow==2.0.0
  - tensorflow-estimator==2.0.1
  - termcolor==1.1.0
  - urllib3==1.25.7
  - werkzeug==0.16.0
  - wrapt==1.11.2
prefix: /home/xxx/anaconda3/envs/conex

helper_classes.py

+34
@@ -0,0 +1,34 @@
class Data:
    # Loads a knowledge graph from a directory containing train.txt, valid.txt
    # and test.txt, each holding whitespace-separated (head, relation, tail) triples.

    def __init__(self, data_dir=None, reverse=False):

        self.info = {'dataset': data_dir,
                     'dataset_augmentation': reverse}

        self.train_data = self.load_data(data_dir, "train", reverse=reverse)
        self.valid_data = self.load_data(data_dir, "valid", reverse=reverse)
        self.test_data = self.load_data(data_dir, "test")
        self.data = self.train_data + self.valid_data + self.test_data
        self.entities = self.get_entities(self.data)
        self.train_relations = self.get_relations(self.train_data)
        self.valid_relations = self.get_relations(self.valid_data)
        self.test_relations = self.get_relations(self.test_data)
        # All relations: training relations first, followed by relations that
        # occur only in the validation or test splits.
        self.relations = self.train_relations + \
            [i for i in self.valid_relations if i not in self.train_relations] + \
            [i for i in self.test_relations if i not in self.train_relations]

    def load_data(self, data_dir, data_type="train", reverse=False):
        with open("%s%s.txt" % (data_dir, data_type), "r") as f:
            data = f.read().strip().split("\n")
            data = [i.split() for i in data]
            if reverse:
                # Augment with reciprocal triples: (tail, relation_reverse, head).
                data += [[i[2], i[1] + "_reverse", i[0]] for i in data]
        return data

    def get_relations(self, data):
        relations = sorted(list(set([d[1] for d in data])))
        return relations

    def get_entities(self, data):
        entities = sorted(list(set([d[0] for d in data] + [d[2] for d in data])))
        return entities
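
A minimal usage sketch of the Data class, assuming KGs.zip has been unzipped so that a KGs/UMLS/ folder with train.txt, valid.txt and test.txt exists (the exact folder name is an assumption):
```
from helper_classes import Data

# Assumed layout: KGs/UMLS/{train,valid,test}.txt with whitespace-separated
# (head, relation, tail) triples; the trailing slash is required because
# load_data builds file paths as data_dir + data_type + ".txt".
d = Data(data_dir='KGs/UMLS/', reverse=True)

print(len(d.entities), 'entities')
print(len(d.relations), 'relations')
print(len(d.train_data), 'training triples (including *_reverse augmentations)')
```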

helper_funcs.py

+34
@@ -0,0 +1,34 @@
import datetime
import logging
import os


def create_experiment_folder(folder_name='Experiments'):
    # Create a timestamped sub-folder for the current run, e.g.
    # <cwd>/Experiments/<current datetime>, and return it with its parent folder.
    directory = os.getcwd() + '/' + folder_name + '/'
    folder_name = str(datetime.datetime.now())
    path_of_folder = directory + folder_name
    os.makedirs(path_of_folder)
    return path_of_folder, path_of_folder[:path_of_folder.rfind('/')]


def create_logger(*, name, p):
    logger = logging.getLogger(name)

    logger.setLevel(logging.INFO)
    # create a file handler that writes INFO-level messages to <p>/info.log
    fh = logging.FileHandler(p + '/info.log')
    fh.setLevel(logging.INFO)

    # create a console handler at the same log level
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)

    # create a formatter and add it to both handlers
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    fh.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(ch)
    logger.addHandler(fh)

    return logger
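
A short illustrative sketch of how the two helpers can be combined; the logger name and folder name below are placeholders, not taken from the commit:
```
from helper_funcs import create_experiment_folder, create_logger

# Creates Experiments/<timestamp>/ under the current working directory and
# returns that path together with the parent Experiments directory.
storage_path, experiments_dir = create_experiment_folder(folder_name='Experiments')

# The logger writes both to the console and to <storage_path>/info.log.
logger = create_logger(name='conex', p=storage_path)
logger.info('Results will be stored in %s', storage_path)
```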
