From 0fbd0891633978ef3f1e90506d4cbad989edef60 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg <jgoldverg@gmail.com>
Date: Mon, 11 Nov 2024 23:41:02 -0500
Subject: [PATCH] Add support for carbon queries, file transfer node listing, and the new transfer SLA

---
 _version.py                                   |   2 +-
 odscli/ods_cli.py                             |   6 +-
 odscli/sdk/carbon_scheduler.py                | 153 ++++++++++++++++++
 odscli/sdk/carbon_scheduler_gui.py            |  60 ++++++
 odscli/sdk/constants.py                       |  21 +++
 odscli/sdk/ftn_nodes.py                       |  69 +++++++
 odscli/sdk/meta_query.py                      |   2 +-
 odscli/sdk/{schedule_job.py => scheduler.py}  | 134 ++++++++----
 requirements.txt                              |   2 +-
 ...pTaccNginx_to_jgoldverg@gmail.com-mac.json |  11 +-
 10 files changed, 424 insertions(+), 36 deletions(-)
 create mode 100644 odscli/sdk/carbon_scheduler.py
 create mode 100644 odscli/sdk/carbon_scheduler_gui.py
 create mode 100644 odscli/sdk/ftn_nodes.py
 rename odscli/sdk/{schedule_job.py => scheduler.py} (54%)

diff --git a/_version.py b/_version.py
index 33a5076..3830112 100644
--- a/_version.py
+++ b/_version.py
@@ -1 +1 @@
-__version__ = "1.1.4"
\ No newline at end of file
+__version__ = "1.1.5"
\ No newline at end of file
diff --git a/odscli/ods_cli.py b/odscli/ods_cli.py
index ab738d2..fd783de 100644
--- a/odscli/ods_cli.py
+++ b/odscli/ods_cli.py
@@ -1,12 +1,14 @@
 import click
-from odscli.sdk.schedule_job import schedule_cli
+from odscli.sdk.scheduler import schedule_cli
 from odscli.sdk.credential import credential_cli
 from odscli.sdk.endpoint_management import endpoint_cli
 from odscli.sdk.query_transfer_data import query_cli
 from odscli.sdk.login import auth_cli
 from odscli.sdk.measure import measure_cli
+from odscli.sdk.ftn_nodes import nodes_cli
+from odscli.sdk.carbon_scheduler import carbon_cli
 
-odscli = click.CommandCollection(sources=[schedule_cli, credential_cli, endpoint_cli, query_cli, auth_cli, measure_cli])
+odscli = click.CommandCollection(sources=[schedule_cli, credential_cli, endpoint_cli, query_cli, auth_cli, measure_cli, nodes_cli, carbon_cli])
 
 def main():
     odscli()
diff --git a/odscli/sdk/carbon_scheduler.py b/odscli/sdk/carbon_scheduler.py
new file mode 100644
index 0000000..865d70e
--- /dev/null
+++ b/odscli/sdk/carbon_scheduler.py
@@ -0,0 +1,153 @@
+import json
+from datetime import datetime
+
+from rich.columns import Columns
+from rich.panel import Panel
+
+from odscli.sdk import token_utils
+import odscli.sdk.constants as constants
+import requests
+import click
+from rich.console import Console
+import odscli.sdk.carbon_scheduler_gui as scheduler_gui
+
+console = Console()
+
+
+@click.group('carbon_cli')
+@click.pass_context
+def carbon_cli(ctx):
+    pass
+
+
+@carbon_cli.group('carbon')
+def carbon():
+    pass
+
+
+@carbon.command('entries', help="List the measurements for a job measured on a given node")
+@click.argument('job_uuid', type=click.UUID)
+@click.argument('transfer_node_name', type=click.STRING)
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def queryCarbonEntries(job_uuid, transfer_node_name, save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_NODE_AND_JOB.format(
+        transferNodeName=transfer_node_name, jobUuid=job_uuid)
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+    data = send_request(uri=uri, cookies=cookies, headers=headers, params={})
+    if data is None: return
+
+    if save_to_file:
+        save_data_to_file(data, save_to_file)
+    scheduler_gui.buildMainCarbonTable(data, console)
+
+
+@carbon.command('user', help="List carbon measurements for a user across all jobs and nodes")
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def getCarbonEntriesForUser(save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_USER
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+
+    data = send_request(uri, cookies, headers, {})
+    if data is None: return
+
+    if save_to_file:
+        save_data_to_file(data, save_to_file)
+
+    data_sorted = sorted(data, key=lambda job: datetime.fromisoformat(job["timeMeasuredAt"]))
+    scheduler_gui.buildMainCarbonTable(data_sorted, console)
+
+
+@carbon.command("job", help="List all carbon measurements for a job")
+@click.argument('job_uuid', type=click.UUID)
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def job_measurements(job_uuid, save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_JOB.format(jobUuid=job_uuid)
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+    data = send_request(uri, cookies, headers, params={})
+    if data is None: return
+    if save_to_file: save_data_to_file(data, save_to_file)
+
+    scheduler_gui.buildMainCarbonTable(data, console)
+
+
+@carbon.command("latest", help="Get the latest carbon measurement for a scheduled job")
+@click.argument('job_uuid', type=click.UUID)
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def query_latest_job_measurement(job_uuid, save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_LATEST.format(jobUuid=job_uuid)
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+    data = send_request(uri, cookies, headers, params={})
+    if data is None: return
+    if save_to_file: save_data_to_file(data, save_to_file)
+
+    trace_route_table = scheduler_gui.buildTraceRouteTable(data['transferNodeName'], data['jobUuid'],
+                                                           data['timeMeasuredAt'], data['traceRouteCarbon'])
+    console.print(trace_route_table)
+
+
+@carbon.command("node", help="Get the carbon measurements produced by a node")
+@click.argument('transfer_node_name', type=click.STRING)
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def query_measurements_made_by_node(transfer_node_name, save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_NODE.format(
+        transferNodeName=transfer_node_name)
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+    data = send_request(uri, cookies, headers, params={})
+    if data is None: return
+    if save_to_file: save_data_to_file(data, save_to_file)
+
+    scheduler_gui.buildMainCarbonTable(data, console)
+
+
+@carbon.command("result",
+                help="Show the initial and final measurements for a scheduled job, to compare the carbon intensity that caused it to launch")
+@click.argument('job_uuid', type=click.UUID)
+@click.option('--save_to_file', type=click.Path(), default=None, help="File path to save the output as JSON")
+def query_result(job_uuid, save_to_file):
+    host, user, token = token_utils.readConfig()
+    uri = constants.ODS_PROTOCOL + host + constants.CARBON_API + constants.CARBON_RESULT.format(job_uuid=job_uuid)
+    cookies = dict(ATOKEN=token)
+    headers = {"Authorization": "Bearer " + token}
+    data = send_request(uri, cookies, headers, params={})
+    if data is None: return
+    if save_to_file: save_data_to_file(data, save_to_file)
+    start_json = data['start']
+    end_json = data['end']
+    start_table = scheduler_gui.buildTraceRouteTable(transferNodeName=start_json['transferNodeName'],
+                                                     jobUuid=start_json['jobUuid'],
+                                                     timeMeasuredAt=start_json['timeMeasuredAt'],
+                                                     trace_route_data=start_json['traceRouteCarbon'])
+    end_table = scheduler_gui.buildTraceRouteTable(transferNodeName=end_json['transferNodeName'],
+                                                   jobUuid=end_json['jobUuid'],
+                                                   timeMeasuredAt=end_json['timeMeasuredAt'],
+                                                   trace_route_data=end_json['traceRouteCarbon'])
+    columns = Columns([Panel(start_table, title="Initial Job Measurement"),
+                       Panel(end_table, title="Last Job Measurement Before Job Execution")])
+    console.print(columns)
+
+
+def send_request(uri, cookies, headers, params):
+    try:
+        console.print(f"Sending request: {uri}")
+        resp = requests.get(uri, cookies=cookies, headers=headers, params=params)
+        resp.raise_for_status()
+        return resp.json()
+    except requests.RequestException as e:
+        console.print(f"[red]Error fetching data: {e}[/red]")
+        return None
+
+
+def save_data_to_file(data, file_path):
+    with open(file_path, 'w') as file:
+        json.dump(data, file, indent=2)
+    console.print(f"[green]Data saved to {file_path}[/green]")
diff --git a/odscli/sdk/carbon_scheduler_gui.py b/odscli/sdk/carbon_scheduler_gui.py
new file mode 100644
index 0000000..d20b696
--- /dev/null
+++ b/odscli/sdk/carbon_scheduler_gui.py
@@ -0,0 +1,60 @@
+from rich.columns import Columns
+from rich.table import Table
+from rich.panel import Panel
+from rich.console import Console
+
+
+def buildMainCarbonTable(data, console: Console):
+    terminal_width = console.width
+
+    # Estimate width per subtable (adjust based on actual subtable width)
+    subtable_width_estimate = 50  # Approximate width per subtable
+
+    # Calculate the number of columns per row based on terminal width
+    columns_per_row = max(1, terminal_width // subtable_width_estimate)
+
+    # Render the jobs as rows of panels, columns_per_row panels at a time
+    row = []
+    for idx, job in enumerate(data):
+        # Build the subtable (trace route table) for this job
+        trace_route_table = buildTraceRouteTable(
+            job["transferNodeName"],
+            job["jobUuid"],
+            job["timeMeasuredAt"],
+            job["traceRouteCarbon"]
+        )
+
+        # Wrap the trace route table in a panel
+        trace_route_panel = Panel(trace_route_table, border_style="bold blue", title="Carbon Trace Route")
+
+        # Add the rendered panel (subtable) to the row
+        row.append(trace_route_panel)
+
+        # Once the row is full, or we are at the last job, print it and reset for the next batch
+        if (idx + 1) % columns_per_row == 0 or (idx + 1) == len(data):
+            # Create a columns layout from the current row of panels
+            columns_layout = Columns(row)
+            console.print(columns_layout)
+            row = []
+
+
+def buildTraceRouteTable(transferNodeName, jobUuid, timeMeasuredAt, trace_route_data):
+    # Create a nested table for trace route details
+    title = f"Transfer Node: {transferNodeName} - Job Uuid: {jobUuid} - Measured At: {timeMeasuredAt}"
+    trace_route_table = Table(title=title, show_header=True, header_style="bold blue")
+    trace_route_table.add_column("Index", justify="right", style="cyan")
+    trace_route_table.add_column("IP", style="cyan")
+    trace_route_table.add_column("Carbon Intensity", justify="right", style="green")
+    trace_route_table.add_column("Latitude", justify="right")
+    trace_route_table.add_column("Longitude", justify="right")
+
+    # Populate the nested table with one row per hop
+    for idx, hop in enumerate(trace_route_data, start=1):
+        trace_route_table.add_row(
+            str(idx),
+            hop["ip"],
+            str(hop["carbonIntensity"]),
+            str(hop["lat"]),
+            str(hop["lon"])
+        )
+    return trace_route_table
diff --git a/odscli/sdk/constants.py b/odscli/sdk/constants.py
index 4a3cad0..2a9977d 100755
--- a/odscli/sdk/constants.py
+++ b/odscli/sdk/constants.py
@@ -20,3 +20,24 @@
 CRED_OAUTH_REGISTERV2 = "/api/oauth"
 CRED_ACCOUNT_GETV2 = "/endpoint-cred/{userId}/{type}"
 CRED_ACCOUNTID_GETV2 = "/endpoint-cred/{userId}/{type}/{accountId}"
+
+NODE_LIST_CONNECTORS = "/api/nodes/{user}"
+NODE_LIST_ODS = "/api/nodes/ods"
+NODE_COUNT = "/api/nodes/count"
+
+CARBON_API = "/api/carbon"
+CARBON_NODE_AND_JOB = "/query/{transferNodeName}/{jobUuid}"
+CARBON_USER = "/user"
+CARBON_JOB = "/job/{jobUuid}"
+CARBON_NODE = "/node/{transferNodeName}"
+CARBON_LATEST = "/latest/{jobUuid}"
+CARBON_RESULT = "/result/{job_uuid}"
+
+
+def human_readable_size(size_in_bytes):
+    """Convert bytes to a human-readable string with appropriate units."""
+    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+        if size_in_bytes < 1024:
+            return f"{size_in_bytes:.2f} {unit}"
+        size_in_bytes /= 1024
+    return f"{size_in_bytes:.2f} PB"  # Handles very large sizes
diff --git a/odscli/sdk/ftn_nodes.py b/odscli/sdk/ftn_nodes.py
new file mode 100644
index 0000000..e53bc05
--- /dev/null
+++ b/odscli/sdk/ftn_nodes.py
@@ -0,0 +1,69 @@
+import click
+import requests
+from rich.console import Console
+from rich.table import Table
+from rich import print
+
+from odscli.sdk import token_utils, constants
+
+console = Console()
+
+
+@click.group('nodes_cli')
+@click.pass_context
+def nodes_cli(ctx):
+    pass
+
+
+@nodes_cli.group('nodes')
+def nodes():
+    pass
+
+
+@nodes.command("ls")
+@click.argument('type', type=click.Choice(['ods', 'connectors']))
+def ls_connectors(type):
+    host, user, token = token_utils.readConfig()
+    if type == 'connectors':
+        req = constants.ODS_PROTOCOL + host + constants.NODE_LIST_CONNECTORS
+        req = req.format(user=user)
+        node_table_name = "{user} ODS Connectors".format(user=user)
+    else:
+        req = constants.ODS_PROTOCOL + host + constants.NODE_LIST_ODS
+        node_table_name = "ODS File Transfer Nodes"
+
+    cookies = dict(ATOKEN=token)
+    res = requests.get(req, cookies=cookies)
+    node_table = build_node_table(node_table_name)
+    for entry in res.json():
+        odsOwner = entry['odsOwner']
+        nodeName = entry['nodeName']
+        nodeUuid = entry['nodeUuid']
+        runningJob = entry['runningJob']
+        online = entry['online']
+        jobId = entry['jobId']
+        jobUuid = entry['jobUuid']
+        node_table.add_row(odsOwner, nodeName, nodeUuid, str(runningJob), str(online), str(jobId), jobUuid)
+
+    console.print(node_table)
+
+
+@nodes.command("count")
+def ls_node_count():
+    host, user, token = token_utils.readConfig()
+    req = constants.ODS_PROTOCOL + host + constants.NODE_COUNT
+    cookies = dict(ATOKEN=token)
+    res = requests.get(req, cookies=cookies)
+    print(res.json())
+
+
+def build_node_table(name):
+    node_table = Table(title=name)
+    node_table.add_column("ODS Owner")
+    node_table.add_column("Node Name")
+    node_table.add_column("Node UUID")
+    node_table.add_column("Running Job")
+    node_table.add_column("Online")
+    node_table.add_column("Job Id")
+    node_table.add_column("Job UUID")
+    return node_table
diff --git a/odscli/sdk/meta_query.py b/odscli/sdk/meta_query.py
index e923cdb..a01d606 100644
--- a/odscli/sdk/meta_query.py
+++ b/odscli/sdk/meta_query.py
@@ -65,7 +65,7 @@ def query_all_jobs_ids(self):
 
     def query_job_ids_direct(self, transfer_url):
         # http://localhost:8092
-        hostStr = transfer_url + "/api/v1/job/ids"
+        hostStr = transfer_url + "/api/v1/job/id"
         r = requests.get(hostStr)
         return r.json()
 
diff --git a/odscli/sdk/schedule_job.py b/odscli/sdk/scheduler.py
similarity index 54%
rename from odscli/sdk/schedule_job.py
rename to odscli/sdk/scheduler.py
index c38c401..15ba6c0 100644
--- a/odscli/sdk/schedule_job.py
+++ b/odscli/sdk/scheduler.py
@@ -1,9 +1,14 @@
 import click
 
 import odscli.sdk.token_utils as token_utils
 import requests
 import odscli.sdk.constants as constants
 import json
 from datetime import datetime
+from rich.console import Console
+from rich.table import Table
+from rich.pretty import pprint
+
+console = Console()
 
 
@@ -23,8 +28,75 @@ def ls():
     url = constants.ODS_PROTOCOL + host + constants.SCHEDULE + "/list"
     cookies = dict(ATOKEN=token)
     resp = requests.get(url, params={'userEmail': user}, cookies=cookies)
-    print(resp.status_code)
-    print(resp.text)
+    jobs_data = resp.json()
+
+    # Main table for listing all jobs
+    main_table = Table(title="Scheduled Jobs", show_lines=True)
+    main_table.add_column("Job #", style="cyan", no_wrap=True)
+    main_table.add_column("Job Details", style="magenta")
+
+    # Loop over each job and create a structured table for each
+    for index, job_data in enumerate(jobs_data, start=1):
+        # Create a detailed table for each job
+        job_table = Table.grid(padding=(0, 1))  # Use grid layout to avoid an extra column
+
+        # Basic job information table
+        basic_info_table = Table(title="Basic Info", show_lines=True)
+        basic_info_table.add_column("Field", style="yellow")
+        basic_info_table.add_column("Value", style="green")
+        basic_info_table.add_row("Owner ID", job_data["ownerId"])
+        basic_info_table.add_row("Job UUID", job_data["jobUuid"])
+        basic_info_table.add_row("Transfer Node Name", job_data["transferNodeName"])
+
+        # Options table, excluding "chunkSize"
+        options_table = Table(title="Options", show_lines=True)
+        options_table.add_column("Option", style="cyan")
+        options_table.add_column("Value", style="magenta")
+        for key, value in job_data["options"].items():
+            if key != "chunkSize":  # Exclude chunkSize from visualization
+                options_table.add_row(key, str(value))
+
+        # Grid layout for Basic Info and Options side by side
+        basic_options_grid = Table.grid()
+        basic_options_grid.add_row(basic_info_table, options_table)
+
+        source_table = Table(title="Source Information", show_lines=True)
+        source_table.add_column("Field", style="yellow")
+        source_table.add_column("Value", style="green")
+        source_table.add_row("Type", job_data["source"]["type"])
+        source_table.add_row("Credential ID", job_data["source"]["credId"])
+        source_table.add_row("Source Path", job_data["source"]["fileSourcePath"])
+
+        file_table = Table(title="File List", show_lines=True)
+        file_table.add_column("ID", style="cyan")
+        file_table.add_column("Path", style="green")
+        file_table.add_column("Size", style="red")
+        file_table.add_column("Chunk Size", style="blue")
+        for file_info in job_data["source"]["infoList"]:
+            file_table.add_row(
+                file_info["id"],
+                file_info["path"],
+                constants.human_readable_size(file_info["size"]),
+                constants.human_readable_size(file_info["chunkSize"])
+            )
+        source_table.add_row("File Information", file_table)
+
+        destination_table = Table(title="Destination Information", show_lines=True)
+        destination_table.add_column("Field", style="yellow")
+        destination_table.add_column("Value", style="green")
+        destination_table.add_row("Type", job_data["destination"]["type"])
+        destination_table.add_row("Credential ID", job_data["destination"]["credId"])
+        destination_table.add_row("Destination Path", job_data["destination"]["fileDestinationPath"])
+
+        source_dest_grid = Table.grid()
+        source_dest_grid.add_row(source_table, destination_table)
+
+        job_table.add_row(basic_options_grid)
+        job_table.add_row(source_dest_grid)
+
+        main_table.add_row(f"Job {index}", job_table)
+
+    console.print(main_table)
 
 
 @schedule.command('details')
@@ -75,10 +147,16 @@ def rm(job_uuid):
 @click.option('--schedule_time', type=click.DateTime(), default=datetime.now(),
               help='ISO 8061 date time string on when to run the job.')
 @click.option('--transfer_node_name', '--node', type=click.STRING, default="")
+@click.option('--percent_carbon', type=click.FLOAT, default=0.0,
+              help="Represents either an increase or a decrease in carbon intensity compared to initial scheduling")
+@click.option('--percent_throughput', type=click.FLOAT, default=0.0,
+              help="Represents either an increase or a decrease in throughput compared to initial scheduling")
+@click.option('--percent_electricity', type=click.FLOAT, default=0.0,
+              help="Represents either an increase or a decrease in electricity compared to initial scheduling")
 @click.option('--save_to_config', is_flag=True, default=False)
 def submit(source_credential_id, source_type, file_source_path, files, destination_credential_id, destination_type,
            file_destination_path, compress, encrypt, optimizer, overwrite, retry, verify, cc, p, pp, chunk_size,
-           transfer_node_name, schedule_time, save_to_config):
+           transfer_node_name, schedule_time, percent_carbon, percent_throughput, percent_electricity, save_to_config):
     host, user, token = token_utils.readConfig()
 
     body = {
@@ -106,7 +184,12 @@
             "chunkSize": chunk_size,
             "scheduledTime": schedule_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
         },
-        "transferNodeName": transfer_node_name
+        "transferNodeName": transfer_node_name,
+        "transferSla": {
+            "percentCarbon": percent_carbon,
+            "percentThroughput": percent_throughput,
+            "percentElectricity": percent_electricity
+        }
     }
 
     resourceList = []
@@ -121,34 +204,26 @@ def submit(source_credential_id, source_type, file_source_path, files, destinati
     url = constants.ODS_PROTOCOL + host + constants.SCHEDULE + "/schedule"
     cookies = dict(ATOKEN=token)
    resp = requests.post(url, cookies=cookies, json=body)
-    print(resp.status_code)
-    print(resp.text)
+    console.print("Job UUID:", resp.text)
 
 
 @schedule.command("config")
 @click.argument('filename', type=click.Path(exists=True, writable=True))
-@click.option('--schedule_time', type=click.DateTime(), default=datetime.now(),
-              help='ISO 8061 date time string on when to run the job.')
-@click.option('--schedule_direct', type=click.STRING, default="")
-def config(filename, schedule_time, schedule_direct):
+@click.option('--schedule_time', type=click.DateTime(), default=None,
+              help='ISO 8601 date time string on when to run the job. Ex: %Y-%m-%dT%H:%M:%S.%fZ')
+def config(filename, schedule_time):
     try:
         with open(filename, "r") as json_file:
             data = json.load(json_file)
-        data['options']['scheduledTime'] = schedule_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-
-        print(data)
-        if len(schedule_direct) > 1:
-            url = 'http://'+schedule_direct+'/job/direct'
-            resp = requests.post(url, json=data)
-            print(resp.status_code)
-            print(resp.text)
-        else:
-            host, user, token = token_utils.readConfig()
-            url = constants.ODS_PROTOCOL + host + constants.SCHEDULE + "/schedule"
-            cookies = dict(ATOKEN=token)
-            resp = requests.post(url, cookies=cookies, json=data)
-            print(resp.status_code)
-            print(resp.text)
+        if schedule_time is not None:
+            data['options']['scheduledTime'] = schedule_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+        console.print("Submitting transfer job definition:")
+        pprint(data)
+        host, user, token = token_utils.readConfig()
+        url = constants.ODS_PROTOCOL + host + constants.SCHEDULE + "/schedule"
+        cookies = dict(ATOKEN=token)
+        resp = requests.post(url, cookies=cookies, json=data)
+        console.print("Job UUID:", resp.text)
     except FileNotFoundError:
         print(f"File not found: {filename}")
     except json.JSONDecodeError as e:
@@ -157,7 +232,7 @@
 @schedule.command("parameters")
 @click.argument('node_name', type=click.STRING)
-@click.option('--concurrency','-cc', type=click.INT, help="Number of files to transfer concurrently")
+@click.option('--concurrency', '-cc', type=click.INT, help="Number of files to transfer concurrently")
 @click.option('--parallelism', '-p', type=click.INT, help="Number of parallel threads per file")
 @click.option('--pipelining', '-pp', type=click.INT, help="Pipelining level to use for file transfer")
 @click.option('--chunksize', '-cs', type=click.INT, help="Chunksize to use for file transfer, currently not supported")
 
@@ -174,5 +249,8 @@ def parameters(node_name, concurrency, parallelism, pipelining, chunksize):
     url = constants.ODS_PROTOCOL + host + constants.SCHEDULE + "/adjust"
     cookies = dict(ATOKEN=token)
     resp = requests.put(url, cookies=cookies, json=body)
-    print(resp.status_code)
-    print(resp.text)
+    if 200 <= resp.status_code < 300:
+        console.print("Sent parameter change request:")
+        pprint(body)
+    else:
+        console.print(f"[red]HTTP error: {resp.status_code}[/red]")
diff --git a/requirements.txt b/requirements.txt
index 5e552bd..a3d796b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 requests~=2.31.0
-pandas~=1.4.3
+pandas
 setuptools~=65.5.1
 tabulate~=0.8.10
 python-dateutil~=2.8.2
diff --git a/transfer_httpTaccNginx_to_jgoldverg@gmail.com-mac.json b/transfer_httpTaccNginx_to_jgoldverg@gmail.com-mac.json
index 22fa813..c1ec746 100644
--- a/transfer_httpTaccNginx_to_jgoldverg@gmail.com-mac.json
+++ b/transfer_httpTaccNginx_to_jgoldverg@gmail.com-mac.json
@@ -9,7 +9,7 @@
       "secret": ""
     },
     "fileSourcePath": "/",
-    "infoList": [
+    "resourceList": [
       {
         "path": "/parallel/",
         "id": "/parallel/",
@@ -30,10 +30,15 @@
     "retry": 5,
     "verify": false,
     "concurrencyThreadCount": 1,
-    "parallelThreadCount": 1,
+    "parallelThreadCount": 5,
     "pipeSize": 1,
     "chunkSize": 10000000,
-    "scheduledTime": "2023-10-25T14:52:15.183975Z"
+    "scheduledTime": "2024-11-25T14:52:15.183975Z"
+  },
+  "transferSla": {
+    "percentCarbon": 0.0,
+    "percentThroughput": 0.0,
+    "percentElectricity": 0.0
   },
   "transferNodeName": ""
 }
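
For reference, every carbon subcommand added in carbon_scheduler.py reduces to one authenticated GET against the endpoints defined in constants.py. The sketch below replays the "carbon latest" flow outside the CLI. The host, scheme, token, and job UUID are illustrative placeholders for values the CLI normally obtains from token_utils.readConfig() and ODS_PROTOCOL; the endpoint paths and the traceRouteCarbon response shape come from the patch above.

import requests

HOST = "localhost:8080"  # placeholder; the CLI reads the real host from its config
TOKEN = "example-token"  # placeholder bearer token
JOB_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder job UUID

# Endpoint pieces as defined in odscli/sdk/constants.py
CARBON_API = "/api/carbon"
CARBON_LATEST = "/latest/{jobUuid}"

uri = "http://" + HOST + CARBON_API + CARBON_LATEST.format(jobUuid=JOB_UUID)
resp = requests.get(uri, cookies={"ATOKEN": TOKEN},
                    headers={"Authorization": "Bearer " + TOKEN})
resp.raise_for_status()

# The response carries transferNodeName, jobUuid, timeMeasuredAt, and a
# traceRouteCarbon list with one ip/carbonIntensity/lat/lon entry per hop,
# which buildTraceRouteTable renders as table rows.
for hop in resp.json()["traceRouteCarbon"]:
    print(hop["ip"], hop["carbonIntensity"], hop["lat"], hop["lon"])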