From c937791759354bb2ff9f72ef27e9d7d3da64a359 Mon Sep 17 00:00:00 2001
From: Jorge Ejarque
Date: Fri, 1 Mar 2024 11:40:11 +0100
Subject: [PATCH] Update with new DLS pipelines

---
 .../playbooks/submit_dag_git2ssh_data.yaml    |  73 ++
 .../playbooks/submit_dag_http2ssh_data.yaml   |   2 +-
 .../playbooks/submit_dag_mlflow_upload.yaml   |  63 ++
 .../submit_dag_model_search_upload.yaml       |  64 ++
 .../playbooks/submit_dag_model_stagein.yaml   |  72 ++
 .../playbooks/submit_dag_ssh2ssh_data.yaml    | 103 +++
 .../playbooks/submit_dag_stagein_data.yaml    |   1 -
 .../playbooks/submit_dag_stageout_data.yaml   |   4 +-
 .../submit_dag_webdav_stagein_data.yaml       |  66 ++
 .../submit_dag_webdav_stageout_data.yaml      |  64 ++
 ansible/types.yml                             | 834 +++++++++++++++++-
 11 files changed, 1300 insertions(+), 46 deletions(-)
 create mode 100644 ansible/playbooks/submit_dag_git2ssh_data.yaml
 create mode 100644 ansible/playbooks/submit_dag_mlflow_upload.yaml
 create mode 100644 ansible/playbooks/submit_dag_model_search_upload.yaml
 create mode 100644 ansible/playbooks/submit_dag_model_stagein.yaml
 create mode 100644 ansible/playbooks/submit_dag_ssh2ssh_data.yaml
 create mode 100644 ansible/playbooks/submit_dag_webdav_stagein_data.yaml
 create mode 100644 ansible/playbooks/submit_dag_webdav_stageout_data.yaml

diff --git a/ansible/playbooks/submit_dag_git2ssh_data.yaml b/ansible/playbooks/submit_dag_git2ssh_data.yaml
new file mode 100644
index 0000000..d7dab82
--- /dev/null
+++ b/ansible/playbooks/submit_dag_git2ssh_data.yaml
@@ -0,0 +1,73 @@
+---
+
+- name: Set host from env
+  set_fact:
+    host: "{{ host_from_env }}"
+  when:
+    - host_from_env is defined
+    - host_from_env | length > 0
+
+- name: Set git_url
+  set_fact:
+    git_url: "{{ git_url_input }}"
+  when:
+    - git_url_input is defined
+    - git_url_input | length > 0
+
+- name: Set git_repo
+  set_fact:
+    git_repo: "{{ git_repo_input }}"
+  when:
+    - git_repo_input is defined
+    - git_repo_input | length > 0
+
+- name: Set target_path
+  set_fact:
+    target_path: "{{ target_path_input }}"
+  when:
+    - target_path_input is defined
+    - target_path_input | length > 0
+
+- name: Set login
+  set_fact:
+    login: "{{ login_input }}"
+  when:
+    - login_input is defined
+    - login_input | length > 0
+
+- name: Set vault_id
+  set_fact:
+    vault_id: "{{ vault_id_input }}"
+  when:
+    - vault_id_input is defined
+    - vault_id_input | length > 0
+
+- name: Set target conf
+  set_fact:
+    target_conf:
+      gitlab_url: "{{ git_url }}"
+      gitlab_repo: "{{ git_repo }}"
+      target: "{{ target_path }}"
+      host: "{{ host }}"
+      login: "{{ login }}"
+      vault_id: "{{ vault_id }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+
+- name: Add extra_conf
+  set_fact:
+    target_conf: "{{ conf | from_json | combine(target_conf) }}"
+  when:
+    - conf is defined
+    - conf | length > 0
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Set request body
+  set_fact:
+    request_body:
+      dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}"
+      conf: "{{ target_conf }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Submit DAG run to Airflow
+  include_tasks: submit_tasks.yaml
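Note: every submit playbook in this patch ends by including `submit_tasks.yaml`, which is not itself part of the change. For context, here is a minimal sketch of what that shared step presumably does — POST the prepared `request_body` to Airflow's stable REST API using the DLS API values the surrounding playbooks pass in. The `uri` task layout below is an illustrative assumption, not the project's actual implementation:

- name: Trigger the DAG run through the Airflow REST API (illustrative sketch)
  ansible.builtin.uri:
    url: "{{ dls_api_url }}/api/v1/dags/{{ dag_id }}/dagRuns"
    method: POST
    user: "{{ dls_api_username }}"
    password: "{{ dls_api_password }}"
    force_basic_auth: true
    body_format: json
    body: "{{ request_body }}"
    status_code: 200
  register: dag_run_response
  no_log: "{{ debug_logs | default('false') == 'false' }}"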
vault_id }}" no_log: "{{ debug_logs | default('false') == 'false' }}" diff --git a/ansible/playbooks/submit_dag_mlflow_upload.yaml b/ansible/playbooks/submit_dag_mlflow_upload.yaml new file mode 100644 index 0000000..c19497a --- /dev/null +++ b/ansible/playbooks/submit_dag_mlflow_upload.yaml @@ -0,0 +1,63 @@ +--- +- name: Set host from env + set_fact: + host: "{{ host_from_env }}" + when: + - host_from_env is defined + - host_from_env | length > 0 + +- name: Set location + set_fact: + location: "{{ location_input }}" + when: + - location_input is defined + - location_input | length > 0 + +- name: Set location with subfolder + set_fact: + location: "{{ location_input }}/{{ subfolder}}" + when: + - subfolder is defined + - subfolder | length > 0 + +- name: Set login + set_fact: + login: "{{ login_input }}" + when: + - login_input is defined + - login_input | length > 0 + +- name: Set vault_id + set_fact: + vault_id: "{{ vault_id_input }}" + when: + - vault_id_input is defined + - vault_id_input | length > 0 + +- name: Set target conf + set_fact: + target_conf: + location: "{{ location }}" + host: "{{ host }}" + login: "{{ login }}" + vault_id: "{{ vault_id }}" + no_log: "{{ debug_logs | default('false') == 'false' }}" + + +- name: add extra_conf + set_fact: + target_conf: "{{ conf | from_json | combine(target_conf) }}" + when: + - conf is defined + - conf | length > 0 + no_log: "{{ debug_logs | default('false') == 'false' }}" + +- name: Set request body + set_fact: + request_body: + dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}" + conf: "{{ target_conf }}" + no_log: "{{ debug_logs | default('false') == 'false' }}" + +- name: Import Submit DAG RUN to Airflow + include_tasks: submit_tasks.yaml diff --git a/ansible/playbooks/submit_dag_model_search_upload.yaml b/ansible/playbooks/submit_dag_model_search_upload.yaml new file mode 100644 index 0000000..2a8d0e2 --- /dev/null +++ b/ansible/playbooks/submit_dag_model_search_upload.yaml @@ -0,0 +1,64 @@ +--- +- name: Set host from env + set_fact: + host: "{{ host_from_env }}" + when: + - host_from_env is defined + - host_from_env | length > 0 + +- name: Set location + set_fact: + location: "{{ location_input }}" + when: + - location_input is defined + - location_input | length > 0 + +- name: Set experiment + set_fact: + experiment: "{{ experiment_input }}" + when: + - experiment_input is defined + - experiment_input | length > 0 + +- name: Set login + set_fact: + login: "{{ login_input }}" + when: + - login_input is defined + - login_input | length > 0 + +- name: Set vault_id + set_fact: + vault_id: "{{ vault_id_input }}" + when: + - vault_id_input is defined + - vault_id_input | length > 0 + +- name: Set target conf + set_fact: + target_conf: + location: "{{ location }}" + experiment_name: "{{ experiment }}" + host: "{{ host }}" + login: "{{ login }}" + vault_id: "{{ vault_id }}" + no_log: "{{ debug_logs | default('false') == 'false' }}" + + +- name: add extra_conf + set_fact: + target_conf: "{{ conf | from_json | combine(target_conf) }}" + when: + - conf is defined + - conf | length > 0 + no_log: "{{ debug_logs | default('false') == 'false' }}" + +- name: Set request body + set_fact: + request_body: + dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}" + conf: "{{ target_conf }}" + no_log: "{{ debug_logs | default('false') == 'false' }}" + +- name: Import Submit DAG RUN to Airflow + include_tasks: submit_tasks.yaml diff --git a/ansible/playbooks/submit_dag_model_stagein.yaml b/ansible/playbooks/submit_dag_model_stagein.yaml 
diff --git a/ansible/playbooks/submit_dag_model_stagein.yaml b/ansible/playbooks/submit_dag_model_stagein.yaml
new file mode 100644
index 0000000..cd57386
--- /dev/null
+++ b/ansible/playbooks/submit_dag_model_stagein.yaml
@@ -0,0 +1,72 @@
+---
+- name: Set host from env
+  set_fact:
+    host: "{{ host_from_env }}"
+  when:
+    - host_from_env is defined
+    - host_from_env | length > 0
+
+- name: Set location
+  set_fact:
+    location: "{{ location_input }}"
+  when:
+    - location_input is defined
+    - location_input | length > 0
+
+- name: Set experiment
+  set_fact:
+    experiment: "{{ experiment_input }}"
+  when:
+    - experiment_input is defined
+    - experiment_input | length > 0
+
+- name: Set model_path
+  set_fact:
+    model_path: "{{ model_path_input }}"
+  when:
+    - model_path_input is defined
+    - model_path_input | length > 0
+
+- name: Set login
+  set_fact:
+    login: "{{ login_input }}"
+  when:
+    - login_input is defined
+    - login_input | length > 0
+
+- name: Set vault_id
+  set_fact:
+    vault_id: "{{ vault_id_input }}"
+  when:
+    - vault_id_input is defined
+    - vault_id_input | length > 0
+
+- name: Set target conf
+  set_fact:
+    target_conf:
+      location: "{{ location }}"
+      mlflow_runid: "{{ experiment }}"
+      mlflow_modelpath: "{{ model_path }}"
+      host: "{{ host }}"
+      login: "{{ login }}"
+      vault_id: "{{ vault_id }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+
+- name: Add extra_conf
+  set_fact:
+    target_conf: "{{ conf | from_json | combine(target_conf) }}"
+  when:
+    - conf is defined
+    - conf | length > 0
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Set request body
+  set_fact:
+    request_body:
+      dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}"
+      conf: "{{ target_conf }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Submit DAG run to Airflow
+  include_tasks: submit_tasks.yaml
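Note: in the model stage-in pipeline the conf keys are renamed on the way out — the `experiment` variable is submitted as `mlflow_runid` and `model_path` as `mlflow_modelpath`. A resulting request body might look like this (all values hypothetical, for illustration only):

request_body:
  dag_run_id: "alice-<random-uuid>"
  conf:
    location: "/scratch/alice/models"
    mlflow_runid: "8f1c2e0d4a"
    mlflow_modelpath: "model"
    host: "cluster.example.org"
    login: "alice"
    vault_id: "alice"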
diff --git a/ansible/playbooks/submit_dag_ssh2ssh_data.yaml b/ansible/playbooks/submit_dag_ssh2ssh_data.yaml
new file mode 100644
index 0000000..1c7ac71
--- /dev/null
+++ b/ansible/playbooks/submit_dag_ssh2ssh_data.yaml
@@ -0,0 +1,103 @@
+---
+
+- name: Set source_host from env
+  set_fact:
+    source_host: "{{ source_host_from_env }}"
+  when:
+    - source_host_from_env is defined
+    - source_host_from_env | length > 0
+
+- name: Set target_host from env
+  set_fact:
+    target_host: "{{ target_host_from_env }}"
+  when:
+    - target_host_from_env is defined
+    - target_host_from_env | length > 0
+
+- name: Set source_path
+  set_fact:
+    source_path: "{{ source_path_input }}"
+  when:
+    - source_path_input is defined
+    - source_path_input | length > 0
+
+- name: Set target_path
+  set_fact:
+    target_path: "{{ target_path_input }}"
+  when:
+    - target_path_input is defined
+    - target_path_input | length > 0
+
+- name: Set source login
+  set_fact:
+    source_login: "{{ source_login_input }}"
+  when:
+    - source_login_input is defined
+    - source_login_input | length > 0
+
+- name: Set target login
+  set_fact:
+    target_login: "{{ target_login_input }}"
+  when:
+    - target_login_input is defined
+    - target_login_input | length > 0
+
+- name: Set source vault_id
+  set_fact:
+    source_vault_id: "{{ source_vault_id_input }}"
+  when:
+    - source_vault_id_input is defined
+    - source_vault_id_input | length > 0
+
+- name: Set target vault_id
+  set_fact:
+    target_vault_id: "{{ target_vault_id_input }}"
+  when:
+    - target_vault_id_input is defined
+    - target_vault_id_input | length > 0
+
+- name: Set source vault_id from target vault_id when not defined
+  set_fact:
+    source_vault_id: "{{ target_vault_id }}"
+  when:
+    - target_vault_id is defined
+    - source_vault_id | length == 0
+
+- name: Set source login from target login when not defined
+  set_fact:
+    source_login: "{{ target_login }}"
+  when:
+    - target_login is defined
+    - source_login | length == 0
+
+- name: Set target conf
+  set_fact:
+    target_conf:
+      source_path: "{{ source_path }}"
+      source_host: "{{ source_host }}"
+      source_login: "{{ source_login }}"
+      source_vault_id: "{{ source_vault_id }}"
+      target_path: "{{ target_path }}"
+      target_host: "{{ target_host }}"
+      target_login: "{{ target_login }}"
+      target_vault_id: "{{ target_vault_id }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+
+- name: Add extra_conf
+  set_fact:
+    target_conf: "{{ conf | from_json | combine(target_conf) }}"
+  when:
+    - conf is defined
+    - conf | length > 0
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Set request body
+  set_fact:
+    request_body:
+      dag_run_id: "{{ target_login }}-{{ 1000000000 | random | to_uuid }}"
+      conf: "{{ target_conf }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Submit DAG run to Airflow
+  include_tasks: submit_tasks.yaml

diff --git a/ansible/playbooks/submit_dag_stagein_data.yaml b/ansible/playbooks/submit_dag_stagein_data.yaml
index a2d0834..432bc4a 100644
--- a/ansible/playbooks/submit_dag_stagein_data.yaml
+++ b/ansible/playbooks/submit_dag_stagein_data.yaml
@@ -41,7 +41,6 @@
       oid: "{{ oid }}"
       target: "{{ target_path }}"
       host: "{{ host }}"
-      port: "{{ port }}"
       login: "{{ login }}"
       vault_id: "{{ vault_id }}"
   no_log: "{{ debug_logs | default('false') == 'false' }}"

diff --git a/ansible/playbooks/submit_dag_stageout_data.yaml b/ansible/playbooks/submit_dag_stageout_data.yaml
index e2eebeb..ccdd182 100644
--- a/ansible/playbooks/submit_dag_stageout_data.yaml
+++ b/ansible/playbooks/submit_dag_stageout_data.yaml
@@ -47,10 +47,10 @@
       mid: "{{ mid }}"
       source: "{{ source_path }}"
       host: "{{ host }}"
-      port: "{{ port }}"
+      port: "{{ port | int }}"
       login: "{{ login }}"
       vault_id: "{{ vault_id }}"
-      register: "{{ register }}"
+      register: "{{ register | bool }}"
   no_log: "{{ debug_logs | default('false') == 'false' }}"
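Note: the apparent motivation for the `| int` and `| bool` casts in this patch is that the conf dictionary is serialized to JSON for the Airflow API, and Jinja templating tends to stringify values (assuming Jinja native types are not enabled). Illustration:

# without casts the submitted conf may carry strings:
#   {"port": "22", "register": "True"}
# with "{{ port | int }}" and "{{ register | bool }}" it keeps JSON-native types:
#   {"port": 22, "register": true}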
diff --git a/ansible/playbooks/submit_dag_webdav_stagein_data.yaml b/ansible/playbooks/submit_dag_webdav_stagein_data.yaml
new file mode 100644
index 0000000..aa69d3d
--- /dev/null
+++ b/ansible/playbooks/submit_dag_webdav_stagein_data.yaml
@@ -0,0 +1,66 @@
+---
+
+- name: Set host from env
+  set_fact:
+    host: "{{ host_from_env }}"
+  when:
+    - host_from_env is defined
+    - host_from_env | length > 0
+
+- name: Set oid
+  set_fact:
+    oid: "{{ oid_input }}"
+  when:
+    - oid_input is defined
+    - oid_input | length > 0
+
+- name: Set target_path
+  set_fact:
+    target_path: "{{ target_path_input }}"
+  when:
+    - target_path_input is defined
+    - target_path_input | length > 0
+
+- name: Set login
+  set_fact:
+    login: "{{ login_input }}"
+  when:
+    - login_input is defined
+    - login_input | length > 0
+
+- name: Set vault_id
+  set_fact:
+    vault_id: "{{ vault_id_input }}"
+  when:
+    - vault_id_input is defined
+    - vault_id_input | length > 0
+
+- name: Set target conf
+  set_fact:
+    target_conf:
+      oid: "{{ oid }}"
+      force: "{{ force | bool }}"
+      target: "{{ target_path }}"
+      host: "{{ host }}"
+      login: "{{ login }}"
+      vault_id: "{{ vault_id }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+
+- name: Add extra_conf
+  set_fact:
+    target_conf: "{{ conf | from_json | combine(target_conf) }}"
+  when:
+    - conf is defined
+    - conf | length > 0
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Set request body
+  set_fact:
+    request_body:
+      dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}"
+      conf: "{{ target_conf }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Submit DAG run to Airflow
+  include_tasks: submit_tasks.yaml

diff --git a/ansible/playbooks/submit_dag_webdav_stageout_data.yaml b/ansible/playbooks/submit_dag_webdav_stageout_data.yaml
new file mode 100644
index 0000000..731488e
--- /dev/null
+++ b/ansible/playbooks/submit_dag_webdav_stageout_data.yaml
@@ -0,0 +1,64 @@
+---
+- name: Set host from env
+  set_fact:
+    host: "{{ host_from_env }}"
+  when:
+    - host_from_env is defined
+    - host_from_env | length > 0
+
+- name: Set oid
+  set_fact:
+    oid: "{{ oid_input }}"
+  when:
+    - oid_input is defined
+    - oid_input | length > 0
+
+- name: Set source_path
+  set_fact:
+    source_path: "{{ source_path_input }}"
+  when:
+    - source_path_input is defined
+    - source_path_input | length > 0
+
+- name: Set login
+  set_fact:
+    login: "{{ login_input }}"
+  when:
+    - login_input is defined
+    - login_input | length > 0
+
+- name: Set vault_id
+  set_fact:
+    vault_id: "{{ vault_id_input }}"
+  when:
+    - vault_id_input is defined
+    - vault_id_input | length > 0
+
+- name: Set target conf
+  set_fact:
+    target_conf:
+      oid: "{{ oid }}"
+      path: "{{ source_path }}"
+      host: "{{ host }}"
+      login: "{{ login }}"
+      vault_id: "{{ vault_id }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+
+- name: Add extra_conf
+  set_fact:
+    target_conf: "{{ conf | from_json | combine(target_conf) }}"
+  when:
+    - conf is defined
+    - conf | length > 0
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Set request body
+  set_fact:
+    request_body:
+      dag_run_id: "{{ login }}-{{ 1000000000 | random | to_uuid }}"
+      conf: "{{ target_conf }}"
+  no_log: "{{ debug_logs | default('false') == 'false' }}"
+
+- name: Submit DAG run to Airflow
+  include_tasks: submit_tasks.yaml

diff --git a/ansible/types.yml b/ansible/types.yml
index ae4d537..5c8dbd8 100644
--- a/ansible/types.yml
+++ b/ansible/types.yml
@@ -128,6 +128,16 @@ node_types:
         description:
         required: true
         default : "target_path"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
     attributes:
       display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
       display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
@@ -149,7 +159,7 @@ node_types:
             target_path: { get_property: [ SELF, target_path ] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
           implementation: playbooks/start_in_standard_mode.yaml
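Note: the new `input_name_for_login` / `input_name_for_vault_id` properties extend the existing `get_input_nf` pattern: the property names a workflow input, and if an input by that name exists it overrides the corresponding node property (`get_input_nf` is the project's non-standard lookup function; its tolerance of a missing input is inferred from the `is defined` / `length > 0` guards in the playbooks). A hypothetical topology using it:

topology_template:
  inputs:
    second_user_id:
      type: string
  node_templates:
    stage_in:
      type: dls.ansible.nodes.DLSDAGStageInData
      properties:
        # login for this node now comes from the `second_user_id` workflow input
        input_name_for_login: "second_user_id"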
@@ -173,11 +183,110 @@ node_types:
             target_path_input: { get_input_nf: [get_property: [SELF, input_name_for_target_path]] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
-            login_input: { get_input: user_id }
-            vault_id_input: { get_input: vault_id }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.DLSDAGWebdavStageInData:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: "webdav_stagein"
+      oid:
+        type: string
+        description: Transferred Object ID
+        required: false
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      target_path:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      input_name_for_oid:
+        type: string
+        description: Name of the workflow input overriding the oid property
+        required: true
+        default : "oid"
+      input_name_for_target_path:
+        type: string
+        description: Name of the workflow input overriding the target_path property
+        required: true
+        default : "target_path"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+      force:
+        type: boolean
+        description: Force transfer of data even if target file already exists
+        required: false
+        default: false
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_webdav_stagein_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          inputs:
+            oid: { get_property: [ SELF, oid ] }
+            target_path: { get_property: [ SELF, target_path ] }
+            force: { get_property: [ SELF, force ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+          implementation: playbooks/start_in_standard_mode.yaml
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_webdav_stagein_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            force: { get_property: [ SELF, force ] }
+            oid: { get_property: [ SELF, oid ] }
+            oid_input: { get_input_nf: [get_property: [SELF, input_name_for_oid]] }
+            target_path: { get_property: [ SELF, target_path ] }
+            target_path_input: { get_input_nf: [get_property: [SELF, input_name_for_target_path]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }

   dls.ansible.nodes.HTTP2SSH:
     derived_from:
dls.ansible.nodes.DLSDAGRun
@@ -219,6 +328,16 @@ node_types:
           If an input with this name exists for the workflow, it overrides the target_path property.
         required: true
         default : "target_path"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
     attributes:
       display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
       display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
@@ -241,7 +360,7 @@ node_types:
             target_path: { get_property: [ SELF, target_path ] }
             host: { get_property: [ SELF, target_host ] }
            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
           implementation: playbooks/start_in_standard_mode.yaml
@@ -266,46 +385,68 @@ node_types:
             target_path_input: { get_input_nf: [get_property: [SELF, input_name_for_target_path]] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
-            login_input: { get_input: user_id }
-            vault_id_input: { get_input: vault_id }
-
-  dls.ansible.nodes.DLSDAGStageOutData:
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.GIT2SSH:
     derived_from: dls.ansible.nodes.DLSDAGRun
     metadata:
       icon: airflow-icon.png
     properties:
-      mid:
+      dag_id:
         type: string
-        description: Uploaded Metadata ID
+        required: true
+        default: git2ssh
+      git_url:
+        type: string
+        description: URL of the git server
+        required: false
+      git_repo:
+        type: string
+        description: Repository in the git server
         required: false
       target_host:
         type: string
         description: the remote host
         required: false
-      source_path:
+      target_path:
         type: string
         description: path of the file on the remote host
         required: false
-      register:
-        type: boolean
-        description: Should the record created in b2share be registered with data cat
-        required: false
-        default: false
-      input_name_for_mid:
+      input_name_for_git_url:
         type: string
+        description: >
+          Name of the workflow input to use to retrieve the URL.
+          If an input with this name exists for the workflow, it overrides the git_url property.
         required: true
-        default: mid
-      input_name_for_source_path:
+        default : "git_url"
+      input_name_for_git_repo:
         type: string
+        description: >
+          Name of the workflow input to use to retrieve the git repo.
+          If an input with this name exists for the workflow, it overrides the git_repo property.
         required: true
-        default: source_path
-      input_name_for_register:
+        default : "git_repo"
+      input_name_for_target_path:
         type: string
+        description: >
+          Name of the workflow input to use to retrieve the target path.
+          If an input with this name exists for the workflow, it overrides the target_path property.
         required: true
-        default: register
+        default : "target_path"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
     attributes:
       display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
       display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
@@ -313,7 +454,7 @@ node_types:
       Standard:
         inputs:
           run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
-          submit_tasks: "submit_dag_stageout_data.yaml"
+          submit_tasks: "submit_dag_git2ssh_data.yaml"
           dls_api_url: { get_property: [ SELF, dls_api_url ] }
           dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
           dls_api_username: { get_property: [ SELF, dls_api_username ] }
@@ -322,20 +463,20 @@ node_types:
           dls_api_password: { get_property: [ SELF, dls_api_password ] }
           dag_id: { get_property: [ SELF, dag_id ] }
           debug_logs: { get_property: [ SELF, debug ] }
           conf: { get_property: [ SELF, extra_conf ] }
         start:
-          implementation: playbooks/start_in_standard_mode.yaml
           inputs:
-            mid: { get_property: [ SELF, mid ] }
-            source_path: { get_property: [ SELF, source_path ] }
-            register: { get_property: [ SELF, register ] }
+            git_url: { get_property: [ SELF, git_url ] }
+            git_repo: { get_property: [ SELF, git_repo ] }
+            target_path: { get_property: [ SELF, target_path ] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
+          implementation: playbooks/start_in_standard_mode.yaml

       tosca.interfaces.node.lifecycle.Runnable:
         inputs:
-          submit_tasks: "submit_dag_stageout_data.yaml"
+          submit_tasks: "submit_dag_git2ssh_data.yaml"
           dls_api_url: { get_property: [ SELF, dls_api_url ] }
           dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
           dls_api_username: { get_property: [ SELF, dls_api_username ] }
           dls_api_password: { get_property: [ SELF, dls_api_password ] }
           dag_id: { get_property: [ SELF, dag_id ] }
           debug_logs: { get_property: [ SELF, debug ] }
           conf: { get_property: [ SELF, extra_conf ] }
@@ -346,19 +487,628 @@ node_types:
         submit:
           implementation: playbooks/submit_dag.yaml
           inputs:
-            mid: { get_property: [ SELF, mid ] }
-            mid_input: { get_input_nf: [get_property: [SELF, input_name_for_mid]] }
-            source_path: { get_property: [ SELF, source_path ] }
-            source_path_input: { get_input_nf: [get_property: [SELF, input_name_for_source_path]] }
-            register: { get_property: [ SELF, register ] }
-            register_input: { get_input_nf: [get_property: [SELF, input_name_for_register]] }
+            git_repo: { get_property: [ SELF, git_repo ] }
+            git_repo_input: { get_input_nf: [get_property: [SELF, input_name_for_git_repo]] }
+            git_url: { get_property: [ SELF, git_url ] }
+            git_url_input: { get_input_nf: [get_property: [SELF, input_name_for_git_url]] }
+            target_path: { get_property: [ SELF, target_path ] }
+            target_path_input: { get_input_nf: [get_property: [SELF, input_name_for_target_path]] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
-            login_input: { get_input: user_id }
-            vault_id_input: { get_input: vault_id }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.SSH2SSH:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: ssh2ssh
+      source_host:
+        type: string
+        description: URL of the source host
+        required: false
+      source_path:
+        type: string
+        description: Path of the file in the source host
+        required: false
+      source_user_id:
+        type: string
+        description: Username in the source host
+        required: false
+      source_vault_id:
+        type: string
+        description: vault_id in the source host
+        required: false
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      target_path:
+        type: string
+        description: path of the file on the target host
+        required: false
+      input_name_for_source_path:
+        type: string
+        description: >
+          Name of the workflow input to use as source_path.
+          If an input with this name exists for the workflow, it overrides the source_path property.
+        required: true
+        default : "source_path"
+      input_name_for_source_user_id:
+        type: string
+        description: >
+          Name of the workflow input to use as source_user_id.
+          If an input with this name exists for the workflow, it overrides the source_user_id property.
+        required: true
+        default : "source_user_id"
+      input_name_for_source_vault_id:
+        type: string
+        description: >
+          Name of the workflow input to use as source_vault_id.
+          If an input with this name exists for the workflow, it overrides the source_vault_id property.
+        required: true
+        default : "source_vault_id"
+      input_name_for_target_path:
+        type: string
+        description: >
+          Name of the workflow input to use to retrieve the target path.
+          If an input with this name exists for the workflow, it overrides the target_path property.
+        required: true
+        default : "target_path"
+      input_name_for_target_user_id:
+        type: string
+        description: >
+          Name of the workflow input to use as target_user_id.
+          If an input with this name exists for the workflow, it overrides the user_id property.
+        required: true
+        default : "target_user_id"
+      input_name_for_target_vault_id:
+        type: string
+        description: >
+          Name of the workflow input to use as target_vault_id.
+          If an input with this name exists for the workflow, it overrides the vault_id property.
+        required: true
+        default : "target_vault_id"
+
+    requirements:
+      - source_environment:
+          capability: eflows4hpc.env.capabilities.ExecutionEnvironment
+          relationship: tosca.relationships.DependsOn
+          occurrences: [ 0, UNBOUNDED ]
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_ssh2ssh_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          inputs:
+            source_path: { get_property: [ SELF, source_path ] }
+            source_host: { get_property: [ SELF, source_host ] }
+            source_host_from_env: { get_attribute: [ REQ_TARGET, source_environment, cluster_login_host] }
+            target_path: { get_property: [ SELF, target_path ] }
+            target_host: { get_property: [ SELF, target_host ] }
+            target_host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            target_port: 22
+            source_port: 22
+            target_login: { get_property: [ SELF, user_id ] }
+            source_login: { get_property: [ SELF, source_user_id ] }
+            target_vault_id: { get_property: [ SELF, vault_id ] }
+            source_vault_id: { get_property: [ SELF, source_vault_id ] }
+          implementation: playbooks/start_in_standard_mode.yaml
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_ssh2ssh_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            source_path: { get_property: [ SELF, source_path ] }
+            source_path_input: { get_input_nf: [get_property: [SELF, input_name_for_source_path]] }
+            source_host: { get_property: [ SELF, source_host ] }
+            source_host_from_env: { get_attribute: [ REQ_TARGET, source_environment, cluster_login_host] }
+            source_port: 22
+            source_login: { get_property: [ SELF, source_user_id ] }
+            source_vault_id: { get_property: [ SELF, source_vault_id ] }
+            source_login_input: { get_input_nf: [get_property: [SELF, input_name_for_source_user_id]] }
+            source_vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_source_vault_id]] }
+            target_path: { get_property: [ SELF, target_path ] }
+            target_path_input: { get_input_nf: [get_property: [SELF, input_name_for_target_path]] }
+            target_host: { get_property: [ SELF, target_host ] }
+            target_host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            target_port: 22
+            target_login: { get_property: [ SELF, user_id ] }
+            target_vault_id: { get_property: [ SELF, vault_id ] }
+            target_login_input: { get_input_nf: [get_property: [SELF, input_name_for_target_user_id]] }
+            target_vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_target_vault_id]] }
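Note: SSH2SSH is the only node type in this patch with two environment dependencies — the inherited `environment` (resolving `target_host` via `cluster_login_host`) plus the new `source_environment` (resolving `source_host`). Combined with the playbook's fallback of source login/vault_id to the target's, a minimal template only needs the target credentials. A hypothetical sketch (template and property values are illustrative):

node_templates:
  transfer:
    type: dls.ansible.nodes.SSH2SSH
    properties:
      source_path: "/archive/input.dat"   # hypothetical paths
      target_path: "/scratch/input.dat"
    requirements:
      - source_environment: cluster_a     # supplies cluster_login_host for the source
      - environment: cluster_b            # supplies cluster_login_host for the target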
+
+
+  dls.ansible.nodes.DLSDAGWebdavStageOutData:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: "webdav_stageout"
+      oid:
+        type: string
+        description: Object ID of the data-set where to include the data
+        required: false
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      source_path:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      input_name_for_oid:
+        type: string
+        required: true
+        default: oid
+      input_name_for_source_path:
+        type: string
+        required: true
+        default: source_path
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_webdav_stageout_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          implementation: playbooks/start_in_standard_mode.yaml
+          inputs:
+            oid: { get_property: [ SELF, oid ] }
+            source_path: { get_property: [ SELF, source_path ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_webdav_stageout_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            oid: { get_property: [ SELF, oid ] }
+            oid_input: { get_input_nf: [get_property: [SELF, input_name_for_oid]] }
+            source_path: { get_property: [ SELF, source_path ] }
+            source_path_input: { get_input_nf: [get_property: [SELF, input_name_for_source_path]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.DLSDAGModelUpload:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: "mlflow_upload"
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      location:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      subfolder:
+        type: string
+        description: subfolder within location
+        required: false
+      input_name_for_location:
+        type: string
+        required: true
+        default: location
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_mlflow_upload.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          implementation: playbooks/start_in_standard_mode.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            subfolder: { get_property: [ SELF, subfolder ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_mlflow_upload.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            subfolder: { get_property: [ SELF, subfolder ] }
+            location_input: { get_input_nf: [get_property: [SELF, input_name_for_location]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
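Note: `subfolder` is purely additive — the mlflow_upload playbook appends it to `location` (the "Set location with subfolder" task above), so with hypothetical values:

properties:
  location: "/scratch/alice/models"
  subfolder: "run-42"
# the submitted conf carries location: "/scratch/alice/models/run-42"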
+
+  dls.ansible.nodes.DLSDAGModelSearchUpload:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: "model_search_upload"
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      location:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      experiment:
+        type: string
+        description: name for the experiment
+        required: false
+      input_name_for_experiment:
+        type: string
+        required: true
+        default: "experiment"
+      input_name_for_location:
+        type: string
+        required: true
+        default: "location"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_model_search_upload.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          implementation: playbooks/start_in_standard_mode.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            experiment: { get_property: [ SELF, experiment ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_model_search_upload.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            experiment: { get_property: [ SELF, experiment ] }
+            experiment_input: { get_input_nf: [get_property: [SELF, input_name_for_experiment]] }
+            location_input: { get_input_nf: [get_property: [SELF, input_name_for_location]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.DLSDAGModelStageIn:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      dag_id:
+        type: string
+        required: true
+        default: "model_stagein"
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      location:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      experiment:
+        type: string
+        description: run id for the experiment
+        required: false
+      model_path:
+        type: string
+        description: path where model is stored inside the experiment
+        required: false
+      input_name_for_model_path:
+        type: string
+        required: true
+        default: "model_path"
+      input_name_for_experiment:
+        type: string
+        required: true
+        default: "experiment"
+      input_name_for_location:
+        type: string
+        required: true
+        default: "location"
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_model_stagein.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          implementation: playbooks/start_in_standard_mode.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            experiment: { get_property: [ SELF, experiment ] }
+            model_path: { get_property: [ SELF, model_path ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_model_stagein.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            location: { get_property: [ SELF, location ] }
+            experiment: { get_property: [ SELF, experiment ] }
+            model_path: { get_property: [ SELF, model_path ] }
+            experiment_input: { get_input_nf: [get_property: [SELF, input_name_for_experiment]] }
+            location_input: { get_input_nf: [get_property: [SELF, input_name_for_location]] }
+            model_path_input: { get_input_nf: [get_property: [SELF, input_name_for_model_path]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }
+
+  dls.ansible.nodes.DLSDAGStageOutData:
+    derived_from: dls.ansible.nodes.DLSDAGRun
+    metadata:
+      icon: airflow-icon.png
+    properties:
+      mid:
+        type: string
+        description: Uploaded Metadata ID
+        required: false
+      target_host:
+        type: string
+        description: the remote host
+        required: false
+      source_path:
+        type: string
+        description: path of the file on the remote host
+        required: false
+      register:
+        type: boolean
+        description: Should the record created in B2SHARE be registered with the Data Catalogue
+        required: false
+        default: false
+      input_name_for_mid:
+        type: string
+        required: true
+        default: mid
+      input_name_for_source_path:
+        type: string
+        required: true
+        default: source_path
+      input_name_for_register:
+        type: string
+        required: true
+        default: register
+      input_name_for_login:
+        type: string
+        description: Name of the workflow input overriding the user_id property
+        required: true
+        default : "user_id"
+      input_name_for_vault_id:
+        type: string
+        description: Name of the workflow input overriding the vault_id property
+        required: true
+        default : "vault_id"
+    attributes:
+      display_url_submit: { get_operation_output: [ SELF, tosca.interfaces.node.lifecycle.Runnable, submit, execution_url ] }
+      display_url_start: { get_operation_output: [ SELF, Standard, start, execution_url ] }
+    interfaces:
+      Standard:
+        inputs:
+          run_in_standard_mode: { get_property: [ SELF, run_in_standard_mode ] }
+          submit_tasks: "submit_dag_stageout_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        start:
+          implementation: playbooks/start_in_standard_mode.yaml
+          inputs:
+            mid: { get_property: [ SELF, mid ] }
+            source_path: { get_property: [ SELF, source_path ] }
+            register: { get_property: [ SELF, register ] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+
+      tosca.interfaces.node.lifecycle.Runnable:
+        inputs:
+          submit_tasks: "submit_dag_stageout_data.yaml"
+          dls_api_url: { get_property: [ SELF, dls_api_url ] }
+          dls_api_url_from_env: { get_attribute: [ REQ_TARGET, environment, dls_api_url] }
+          dls_api_username: { get_property: [ SELF, dls_api_username ] }
+          dls_api_password: { get_property: [ SELF, dls_api_password ] }
+          dag_id: { get_property: [ SELF, dag_id ] }
+          debug_logs: { get_property: [ SELF, debug ] }
+          conf: { get_property: [ SELF, extra_conf ] }
+        submit:
+          implementation: playbooks/submit_dag.yaml
+          inputs:
+            mid: { get_property: [ SELF, mid ] }
+            mid_input: { get_input_nf: [get_property: [SELF, input_name_for_mid]] }
+            source_path: { get_property: [ SELF, source_path ] }
+            source_path_input: { get_input_nf: [get_property: [SELF, input_name_for_source_path]] }
+            register: { get_property: [ SELF, register ] }
+            register_input: { get_input_nf: [get_property: [SELF, input_name_for_register]] }
+            host: { get_property: [ SELF, target_host ] }
+            host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
+            port: 22
+            login: { get_property: [ SELF, user_id ] }
+            vault_id: { get_property: [ SELF, vault_id ] }
+            login_input: { get_input_nf: [get_property: [SELF, input_name_for_login]] }
+            vault_id_input: { get_input_nf: [get_property: [SELF, input_name_for_vault_id]] }

   dls.ansible.nodes.DLSDAGImageTransfer:
     derived_from: dls.ansible.nodes.DLSDAGRun
@@ -418,7 +1168,7 @@ node_types:
             target_path: { get_property: [ SELF, target_path ] }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
@@ -445,7 +1195,7 @@ node_types:
             target_path_input: { get_input: image_target_path }
             host: { get_property: [ SELF, target_host ] }
             host_from_env: { get_attribute: [ REQ_TARGET, environment, cluster_login_host] }
-            port: "22"
+            port: 22
             login: { get_property: [ SELF, user_id ] }
             vault_id: { get_property: [ SELF, vault_id ] }
             login_input: { get_input: user_id }