diff --git a/README.md b/README.md index 93f2e490..c3beef53 100644 --- a/README.md +++ b/README.md @@ -134,6 +134,9 @@ runpod.api_key = "your_runpod_api_key_found_under_settings" # Get all my pods pods = runpod.get_pods() +# Get a specific pod +pod = runpod.get_pod(pods[0]["id"]) + # Create a pod pod = runpod.create_pod("test", "runpod/stack", "NVIDIA GeForce RTX 3070") diff --git a/runpod/api_wrapper/ctl_commands.py b/runpod/api_wrapper/ctl_commands.py index 18f56386..c3976e1b 100644 --- a/runpod/api_wrapper/ctl_commands.py +++ b/runpod/api_wrapper/ctl_commands.py @@ -38,6 +38,15 @@ def get_pods() -> dict: cleaned_return = raw_return["data"]["myself"]["pods"] return cleaned_return +def get_pod(pod_id : str) -> dict: + ''' + Get a specific pod + + :param pod_id: the id of the pod + ''' + raw_response = run_graphql_query(pod_queries.generate_pod_query(pod_id)) + return raw_response["data"]["pod"] + def create_pod(name : str, image_name : str, gpu_type_id : str, cloud_type : str="ALL", data_center_id : Optional[str]=None, country_code:Optional[str]=None, gpu_count:int=1, volume_in_gb:int=0, container_disk_in_gb:int=5, diff --git a/runpod/api_wrapper/queries/pods.py b/runpod/api_wrapper/queries/pods.py index 6bb2075d..e6c1daa9 100644 --- a/runpod/api_wrapper/queries/pods.py +++ b/runpod/api_wrapper/queries/pods.py @@ -33,3 +33,38 @@ }} }} """ + +def generate_pod_query(pod_id): + ''' + Generate a query for a specific pod + ''' + + return f""" + query pod {{ + pod(input: {{podId: "{pod_id}"}}) {{ + id + containerDiskInGb + costPerHr + desiredStatus + dockerArgs + dockerId + env + gpuCount + imageName + lastStatusChange + machineId + memoryInGb + name + podType + port + ports + uptimeSeconds + vcpuCount + volumeInGb + volumeMountPath + machine {{ + gpuDisplayName + }} + }} + }} + """ diff --git a/tests/test_api_wrapper/test_ctl_commands.py b/tests/test_api_wrapper/test_ctl_commands.py index 86cad375..e37e6200 100644 --- a/tests/test_api_wrapper/test_ctl_commands.py +++ 
b/tests/test_api_wrapper/test_ctl_commands.py @@ -193,3 +193,40 @@ def test_get_pods(self): self.assertEqual(len(pods), 1) self.assertEqual(pods[0]["id"], "POD_ID") + + def test_get_pod(self): + ''' + Tests get_pod + ''' + with patch("runpod.api_wrapper.graphql.requests.post") as patch_request: + patch_request.return_value.json.return_value = { + "data": { + "pod": { + "id": "POD_ID", + "containerDiskInGb": 5, + "costPerHr": 0.34, + "desiredStatus": "RUNNING", + "dockerArgs": None, + "dockerId": None, + "env": [], + "gpuCount": 1, + "imageName": "runpod/pytorch:2.0.1-py3.10-cuda11.8.0-devel", + "lastStatusChange": "Rented by User: Tue Aug 15 2023", + "machineId": "MACHINE_ID", + "memoryInGb": 83, + "name": "POD_NAME", + "podType": "RESERVED", + "port": None, + "ports": "80/http", + "uptimeSeconds": 0, + "vcpuCount": 21, + "volumeInGb": 200, + "volumeMountPath": "/workspace", + "machine": { "gpuDisplayName": "RTX 3090" } + } + } + } + + pods = ctl_commands.get_pod("POD_ID") + + self.assertEqual(pods["id"], "POD_ID")