Commit a3e27be: change pipeline

jmargutt committed Jun 21, 2021
1 parent b6f30fc, commit a3e27be
Showing 3 changed files with 58 additions and 64 deletions.
7 changes: 2 additions & 5 deletions .dockerignore

@@ -2,8 +2,5 @@ venv/
 __pycache__/
 .git/
 .idea/
-mosquito_model/credentials/*.json
-mosquito_model/credentials/*.env
-data/
-notebooks/
-analysis/
+# mosquito_model/credentials/*.json
+# mosquito_model/credentials/*.env
2 changes: 1 addition & 1 deletion mosquito_model/credentials/example-ibf-credentials.env

@@ -1,3 +1,3 @@
-IBF_API_URL=https://ibf-philippines.510.global/api/admin-area-dynamic-data/upload
+IBF_API_URL=https://ibf-server.510.global
 ADMIN_LOGIN=username
 ADMIN_PASSWORD=password
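
Note: the pipeline code below consumes this file with python-dotenv. A minimal sketch of how the renamed IBF_API_URL is read, assuming the real ibf-credentials.env mirrors this example file:

    import os
    from dotenv import load_dotenv  # python-dotenv

    # assumes ibf-credentials.env sits in the credentials directory and follows the example above
    load_dotenv(dotenv_path='mosquito_model/credentials/ibf-credentials.env')

    IBF_API_URL = os.environ.get("IBF_API_URL")      # e.g. https://ibf-server.510.global
    ADMIN_LOGIN = os.environ.get("ADMIN_LOGIN")
    ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD")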
113 changes: 55 additions & 58 deletions mosquito_model/src/mosquito_model/pipeline.py

@@ -76,10 +76,8 @@ def get_dates_in_range(begin, end):
 @click.option('--predictend', default=None, help='end predictions on date (%Y-%m-%d)')
 @click.option('--storeraster', is_flag=True, help='store raster data')
 @click.option('--verbose', is_flag=True, help='print each step')
-@click.option('--ibfupload', is_flag=True, help='upload output to IBF system using IBF-API')
-@click.option('--saverequest', is_flag=True, help='save IBF-API call to json')
 def main(countrycode, vector, temperaturesuitability, thresholds, demographics, credentials, admincode, data, dest,
-         predictstart, predictend, storeraster, verbose, ibfupload, saverequest):
+         predictstart, predictend, storeraster, verbose):
 
     # initialize GEE
     gee_credentials = os.path.join(credentials, 'era-service-account-credentials.json')
@@ -97,7 +95,6 @@ def main(countrycode, vector, temperaturesuitability, thresholds, demographics,
         start_date = datetime.datetime.strptime(predictstart, '%Y-%m-%d') - relativedelta(months=+3)
         start_date = start_date.replace(day=1)
         start_date = start_date.strftime("%Y-%m-%d")
-        print('ECCOLA', start_date)
     if predictend is not None:
         end_date = datetime.datetime.strptime(predictend, '%Y-%m-%d') - relativedelta(months=+1)
         end_date = end_date.strftime("%Y-%m-%d")
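
Note: the only change in this hunk is removing a leftover debug print; the surrounding context computes the data-collection window that feeds the predictions. A minimal sketch of that date arithmetic, with illustrative dates:

    import datetime
    from dateutil.relativedelta import relativedelta

    predictstart, predictend = '2021-06-21', '2021-08-21'  # illustrative CLI values

    # collection starts 3 months before the first prediction date, snapped to day 1
    start_date = datetime.datetime.strptime(predictstart, '%Y-%m-%d') - relativedelta(months=+3)
    start_date = start_date.replace(day=1).strftime('%Y-%m-%d')

    # collection ends 1 month before the prediction end date
    end_date = datetime.datetime.strptime(predictend, '%Y-%m-%d') - relativedelta(months=+1)
    end_date = end_date.strftime('%Y-%m-%d')

    print(start_date, end_date)  # -> 2021-03-01 2021-07-21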
@@ -165,75 +162,75 @@ def main(countrycode, vector, temperaturesuitability, thresholds, demographics,
     df_predictions['potential_cases'] = 0
     df_predictions['potential_cases_U9'] = 0
     df_predictions['potential_cases_65'] = 0
-    df_predictions['alert'] = False
+    df_predictions['alert_threshold'] = 0
 
     for ix, row in df_predictions.iterrows():
         place_date = (df_thresholds['adm_division']==row['adm_division']) & (df_thresholds['month']==row['month'])
         coeff = df_thresholds[place_date]['coeff'].values[0]
         thr_std = df_thresholds[place_date]['alert_threshold_std'].values[0]
         thr_qnt = df_thresholds[place_date]['alert_threshold_qnt'].values[0]
+        max_thr = max(thr_std, thr_qnt)
         if row['risk'] > thr_std and row['risk'] > thr_qnt:
-            df_predictions.at[ix, 'alert'] = True
+            df_predictions.at[ix, 'alert_threshold'] = 1
             df_predictions.at[ix, 'potential_cases'] = int(coeff * row['risk'] * df_demo.loc[row['adm_division'], 'Population'])
             df_predictions.at[ix, 'potential_cases_U9'] = int(coeff * row['risk'] * df_demo.loc[row['adm_division'], 'Population U9'])
             df_predictions.at[ix, 'potential_cases_65'] = int(coeff * row['risk'] * df_demo.loc[row['adm_division'], 'Population 65+'])
+            df_predictions.at[ix, 'potential_cases_threshold'] = int(coeff * max_thr * df_demo.loc[row['adm_division'], 'Population'])
     if verbose:
         print('VECTOR SUITABILITY AND RISK PREDICTIONS AND POTENTIAL CASES')
         print(df_predictions.head())
     df_predictions.to_csv(predictions_data)  # store predictions
 
-    if ibfupload:
-        # load IBF system credentials
-        ibf_credentials = os.path.join(credentials, 'ibf-credentials.env')
-        if not os.path.exists(ibf_credentials):
-            print(f'ERROR: IBF credentials not found in {credentials}')
-            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), ibf_credentials)
-        load_dotenv(dotenv_path=ibf_credentials)
-        IBF_API_URL = os.environ.get("IBF_API_URL")
-        ADMIN_LOGIN = os.environ.get("ADMIN_LOGIN")
-        ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD")
-
-        # prepare data to upload
-        today = datetime.date.today()
-
-        # loop over lead times
-        for num_lead_time, lead_time in enumerate(["0-month", "1-month", "2-month"]):
-
-            # select dataframe of given lead time
-            lead_time_date = today + relativedelta(months=num_lead_time)
-            df_month = df_predictions[(df_predictions['year']==lead_time_date.year)
-                                      & (df_predictions['month']==lead_time_date.month)]
-
-            # loop over layers to upload
-            for layer in ["alert", "potential_cases", "potential_cases_U9", "potential_cases_65"]:
-
-                # prepare layer
-                exposure_data = {'countryCodeISO3': countrycode}
-                exposure_place_codes = []
-                for ix, row in df_month.iterrows():
-                    exposure_entry = {'placeCode': row['adm_division'],
-                                      'amount': row[layer]}
-                    exposure_place_codes.append(exposure_entry)
-                exposure_data['exposurePlaceCodes'] = exposure_place_codes
-                exposure_data["leadTime"] = lead_time
-                exposure_data["exposureUnit"] = layer
-
-                if saverequest:
-                    with open(os.path.join(dest, f'upload-exposure-example_leadTime-{lead_time}_exposureUnit-{layer}.json'), 'w') as fp:
-                        json.dump(exposure_data, fp)
-
-                # upload data
-                login_response = requests.post(f'{IBF_API_URL}/api/user/login',
-                                               data=[('email', ADMIN_LOGIN), ('password', ADMIN_PASSWORD)])
-                TOKEN = login_response.json()['user']['token']
-                r = requests.post(os.path.join(IBF_API_URL, 'api/upload/exposure'),
-                                  json=exposure_data,
-                                  headers={'Authorization': 'Bearer '+TOKEN,
-                                           'Content-Type': 'application/json',
-                                           'Accept': 'application/json'})
-                if r.status_code >= 400:
-                    print(r.text)
-                    raise ValueError()
+    # load IBF system credentials
+    ibf_credentials = os.path.join(credentials, 'ibf-credentials.env')
+    if not os.path.exists(ibf_credentials):
+        print(f'ERROR: IBF credentials not found in {credentials}')
+        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), ibf_credentials)
+    load_dotenv(dotenv_path=ibf_credentials)
+    IBF_API_URL = os.environ.get("IBF_API_URL")
+    ADMIN_LOGIN = os.environ.get("ADMIN_LOGIN")
+    ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD")
+
+    # prepare data to upload
+    today = datetime.date.today()
+
+    # loop over lead times
+    for num_lead_time, lead_time in tqdm(enumerate(["0-month", "1-month", "2-month"])):
+
+        # select dataframe of given lead time
+        lead_time_date = today + relativedelta(months=num_lead_time)
+        df_month = df_predictions[(df_predictions['year']==lead_time_date.year)
+                                  & (df_predictions['month']==lead_time_date.month)]
+
+        # loop over layers to upload
+        for layer in tqdm(["alert_threshold", "potential_cases", "potential_cases_U9", "potential_cases_65",
+                           "potential_cases_threshold"], leave=False):
+
+            # prepare layer
+            exposure_data = {'countryCodeISO3': countrycode}
+            exposure_place_codes = []
+            for ix, row in df_month.iterrows():
+                exposure_entry = {'placeCode': row['adm_division'],
+                                  'amount': row[layer]}
+                exposure_place_codes.append(exposure_entry)
+            exposure_data['exposurePlaceCodes'] = exposure_place_codes
+            exposure_data["adminLevel"] = 2
+            exposure_data["leadTime"] = lead_time
+            exposure_data["dynamicIndicator"] = layer
+
+            # upload data
+            login_response = requests.post(f'{IBF_API_URL}/api/user/login',
+                                           data=[('email', ADMIN_LOGIN), ('password', ADMIN_PASSWORD)])
+            TOKEN = login_response.json()['user']['token']
+            r = requests.post(f'{IBF_API_URL}/api/admin-area-dynamic-data/exposure',
+                              json=exposure_data,
+                              headers={'Authorization': 'Bearer '+TOKEN,
+                                       'Content-Type': 'application/json',
+                                       'Accept': 'application/json'})
+            if r.status_code >= 400:
+                print(r.text)
+                raise ValueError()
+
 
 if __name__ == "__main__":
     main()
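
Note: with the --ibfupload flag gone, the upload always runs and targets the new /api/admin-area-dynamic-data/exposure endpoint with the new adminLevel and dynamicIndicator fields. A minimal standalone sketch of a single upload call mirroring the new code; the country code, place code, and amount are illustrative:

    import requests

    IBF_API_URL = 'https://ibf-server.510.global'  # from ibf-credentials.env

    # log in once to obtain a bearer token
    login = requests.post(f'{IBF_API_URL}/api/user/login',
                          data=[('email', 'username'), ('password', 'password')])
    token = login.json()['user']['token']

    # one indicator, one lead time, one admin area (illustrative values)
    exposure_data = {
        'countryCodeISO3': 'PHL',  # illustrative country code
        'exposurePlaceCodes': [{'placeCode': 'PH012800000', 'amount': 42}],  # illustrative
        'adminLevel': 2,
        'leadTime': '0-month',
        'dynamicIndicator': 'potential_cases',
    }

    r = requests.post(f'{IBF_API_URL}/api/admin-area-dynamic-data/exposure',
                      json=exposure_data,
                      headers={'Authorization': f'Bearer {token}',
                               'Content-Type': 'application/json',
                               'Accept': 'application/json'})
    if r.status_code >= 400:
        print(r.text)
        raise ValueError('upload failed')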