Merged
3 changes: 2 additions & 1 deletion Pipfile
@@ -6,7 +6,7 @@ name = "pypi"
 [packages]
 black = "==18.9b0"
 chainer = "==6.0.0b1"
-comet-ml = "==1.0.42"
+comet-ml = "==1.0.45"
 cupy-cuda92 = "==6.0.0b1"
 cython = "==0.29.2"
 descartes = "==1.1.0"
@@ -20,6 +20,7 @@ matplotlib = "==3.0.2"
 netcdf4 = "==1.4.1"
 numpy = "==1.14.5"
 onnx_chainer = "==1.3.0a1"
+optuna = "==0.7.0"
 packaging = "==19.0"
 pandas = "==0.24.1"
 pyproj = "==1.9.6"
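The newly pinned optuna = "==0.7.0" points at the hyperparameter tuning this pull request lays groundwork for. A minimal sketch of how Optuna 0.7 could search over the three hyperparameters named in features/srgan_train.feature further below; the objective body and the search ranges are illustrative assumptions, not part of this diff:

import optuna


def objective(trial: optuna.trial.Trial) -> float:
    # Sample the same three hyperparameters the feature file exercises
    num_residual_blocks = trial.suggest_int("num_residual_blocks", 1, 16)
    residual_scaling = trial.suggest_uniform("residual_scaling", 0.1, 1.0)
    learning_rate = trial.suggest_loguniform("learning_rate", 1e-5, 1e-2)

    # train_and_evaluate is a hypothetical helper that would compile the
    # SRGAN generator with these settings, train it for a while, and
    # return the error on the test area (lower is better)
    return train_and_evaluate(num_residual_blocks, residual_scaling, learning_rate)


study = optuna.create_study()  # minimizes the objective by default
study.optimize(objective, n_trials=20)
print(study.best_params)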
112 changes: 92 additions & 20 deletions Pipfile.lock

Some generated files are not rendered by default.

185 changes: 91 additions & 94 deletions deepbedmap.ipynb

Large diffs are not rendered by default.

8 changes: 5 additions & 3 deletions deepbedmap.py
@@ -185,18 +185,20 @@ def plot_3d_view(
 
 # %%
 def load_trained_model(
-    filepath: str = "model/weights/srgan_generator_model_weights.npz"
+    model=None,
+    model_weights_path: str = "model/weights/srgan_generator_model_weights.npz",
 ):
     """
     Builds the Generator component of the DeepBedMap neural network.
     Also loads trained parameter weights into the model from a .npz file.
     """
     srgan_train = _load_ipynb_modules("srgan_train.ipynb")
 
-    model = srgan_train.GeneratorModel()
+    if model is None:
+        model = srgan_train.GeneratorModel()
 
     # Load trained neural network weights into model
-    chainer.serializers.load_npz(file=filepath, obj=model)
+    chainer.serializers.load_npz(file=model_weights_path, obj=model)
 
     return model
 
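With the new keyword arguments, callers can inject a pre-built generator (for instance, one constructed with non-default hyperparameters) and have the trained weights loaded into it. A usage sketch; the GeneratorModel keyword arguments are assumptions based on the hyperparameters exercised in features/srgan_train.feature, and _load_ipynb_modules is the repository's own notebook loader:

# Default behaviour, unchanged: build a fresh generator, then load weights
deepbedmap = _load_ipynb_modules("deepbedmap.ipynb")
srgan_train = _load_ipynb_modules("srgan_train.ipynb")
model = deepbedmap.load_trained_model()

# New behaviour: load trained weights into a generator built elsewhere
custom_generator = srgan_train.GeneratorModel(
    num_residual_blocks=1, residual_scaling=0.3  # assumed keyword arguments
)
model = deepbedmap.load_trained_model(model=custom_generator)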
46 changes: 26 additions & 20 deletions features/environment.py
@@ -10,7 +10,6 @@
 import nbformat
 import pandas as pd
 import quilt
-import requests
 
 
 def _load_ipynb_modules(ipynb_path: str):
@@ -87,34 +86,32 @@ def _quick_download_lowres_misc_datasets():
 def _download_deepbedmap_model_weights_from_comet():
     """
     Download latest neural network model weights from Comet.ML
-    Uses their REST API endpoint https://www.comet.ml/docs/rest-api/endpoints/
+    Uses their Python REST API class at https://www.comet.ml/docs/python-sdk/API/
     Requires the COMET_REST_API_KEY environment variable to be set in the .env file
     """
-    authHeader = {"Authorization": base64.b64decode(s=os.environ["COMET_REST_API_KEY"])}
-
-    # Get list of DeepBedMap experiments (projectId a7e4f47215b94cd98d6db8a092d78232)
-    r = requests.get(
-        url="https://www.comet.ml/api/rest/v1/experiments",
-        params={"projectId": "a7e4f47215b94cd98d6db8a092d78232"},
-        headers=authHeader,
+    comet_api = comet_ml.API(
+        rest_api_key=base64.b64decode(s=os.environ["COMET_REST_API_KEY"])
     )
-    df = pd.io.json.json_normalize(r.json()["experiments"])
+
+    # Get list of DeepBedMap experiments
+    project = comet_api.get(workspace="weiji14", project="deepbedmap")
+    df = pd.io.json.json_normalize(data=project.data["experiments"].values())
 
     # Get the key to the latest DeepBedMap experiment on Comet ML
     experiment_key = df.loc[df["start_server_timestamp"].idxmax()].experiment_key
 
-    # Use key to access url to the experiment's asset which is the hdf5 weight file
-    r = requests.get(
-        url="https://www.comet.ml/api/rest/v1/asset/get-asset-list",
-        params={"experimentKey": experiment_key},
-        headers=authHeader,
+    experiment = comet_api.get(
+        workspace="weiji14", project="deepbedmap", experiment=experiment_key
     )
-    asset_url = r.json()[0]["link"]
 
-    # Download the neural network weight file (hdf5 format) to the right place!
-    r = requests.get(url=asset_url, headers=authHeader)
+    # Use the key to access the experiment's asset, which is the npz weight file
+    assets = experiment.asset_list
+    for asset in assets:
+        if asset["fileName"].endswith(".npz"):  # make sure we pick the .npz file
+            asset_id = asset["assetId"]
+            break
+    # Download the neural network weight file (npz format) to the right place!
     open(file="model/weights/srgan_generator_model_weights.npz", mode="wb").write(
-        r.content
+        experiment.get_asset(asset_id=asset_id)
     )
 
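A quick sanity check after this download is to deserialize the weights straight back into a generator, mirroring what deepbedmap.load_trained_model does. A sketch assuming the function above has already run; _load_ipynb_modules and GeneratorModel come from this repository:

import chainer

# The downloaded .npz should deserialize cleanly into the generator
srgan_train = _load_ipynb_modules(ipynb_path="srgan_train.ipynb")
model = srgan_train.GeneratorModel()
chainer.serializers.load_npz(
    file="model/weights/srgan_generator_model_weights.npz", obj=model
)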
@@ -125,6 +122,13 @@ def fixture_data_prep(context):
     return context.data_prep
 
 
+@fixture
+def fixture_srgan_train(context):
+    # set context.srgan_train to have all the module functions
+    context.srgan_train = _load_ipynb_modules(ipynb_path="srgan_train.ipynb")
+    return context.srgan_train
+
+
 @fixture
 def fixture_deepbedmap(context):
     # Quickly download all the neural network input datasets
@@ -139,5 +143,7 @@ def fixture_deepbedmap(context):
 def before_tag(context, tag):
     if tag == "fixture.data_prep":
         use_fixture(fixture_func=fixture_data_prep, context=context)
+    elif tag == "fixture.srgan_train":
+        use_fixture(fixture_func=fixture_srgan_train, context=context)
     elif tag == "fixture.deepbedmap":
         use_fixture(fixture_func=fixture_deepbedmap, context=context)
19 changes: 18 additions & 1 deletion features/srgan_train.feature
@@ -1,2 +1,19 @@
 # language: en
-Feature: Super Resolution Model
+@fixture.srgan_train
+Feature: Train Super Resolution Model
+  In order to have a well-performing super resolution model
+  As a machine learning engineer,
+  We want to craft and teach the model to do well on a test area
+
+  Background: Load the prepared data
+    Given a prepared collection of tiled raster data
+
+  Scenario Outline: Train Super Resolution Model with fixed hyperparameters
+    Given some hyperparameter settings <num_residual_blocks> <residual_scaling> <learning_rate>
+    And a compiled neural network model
+    When the model is trained for a while
+    Then we know how well the model performs on our test area
+
+    Examples: Fixed hyperparameters
+      | num_residual_blocks | residual_scaling | learning_rate |
+      | 1                   | 0.3              | 5e-4          |
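The new scenario outline presupposes matching step implementations, with context.srgan_train supplied by the @fixture.srgan_train tag wired up in features/environment.py above. A hypothetical sketch of the steps file; the path, function bodies, and srgan_train helper names are assumptions:

# features/steps/test_srgan_train.py (hypothetical path and bodies)
from behave import given, then, when


@given("a prepared collection of tiled raster data")
def step_prepare_data(context):
    context.data = context.srgan_train.load_data_into_memory()  # assumed helper


@given("some hyperparameter settings {num_residual_blocks} {residual_scaling} {learning_rate}")
def step_set_hyperparameters(context, num_residual_blocks, residual_scaling, learning_rate):
    # behave's default parse matcher passes the outline values in as strings
    context.num_residual_blocks = int(num_residual_blocks)
    context.residual_scaling = float(residual_scaling)
    context.learning_rate = float(learning_rate)


@given("a compiled neural network model")
def step_compile_model(context):
    context.model = context.srgan_train.GeneratorModel(  # keyword arguments assumed
        num_residual_blocks=context.num_residual_blocks,
        residual_scaling=context.residual_scaling,
    )


@when("the model is trained for a while")
def step_train_model(context):
    # assumed training entry point; the real signature may differ
    context.metrics = context.srgan_train.train(
        model=context.model, learning_rate=context.learning_rate
    )


@then("we know how well the model performs on our test area")
def step_evaluate_model(context):
    assert context.metrics is not None  # e.g. an RMSE on the test area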