Skip to content

Commit d357ef5

Browse files
Merge pull request #95 from alexeevit/feature/getPodsQuery
Add get_pods() API method
2 parents 3ea7bba + dda4919 commit d357ef5

File tree

5 files changed

+94
-5
lines changed

5 files changed

+94
-5
lines changed

README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,9 @@ import runpod
131131

132132
runpod.api_key = "your_runpod_api_key_found_under_settings"
133133

134+
# Get all my pods
135+
pods = runpod.get_pods()
136+
134137
# Create a pod
135138
pod = runpod.create_pod("test", "runpod/stack", "NVIDIA GeForce RTX 3070")
136139

runpod/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from .endpoint import AsyncioEndpoint, AsyncioJob
88
from .api_wrapper.ctl_commands import(
99
get_gpus, get_gpu,
10+
get_pods,
1011
create_pod, stop_pod, resume_pod, terminate_pod
1112
)
1213
from .cli.config import set_credentials, check_credentials, get_credentials

runpod/api_wrapper/ctl_commands.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,9 @@
55

66
from typing import Optional
77
from .queries import gpus
8+
from .queries import pods as pod_queries
89
from .graphql import run_graphql_query
9-
from .mutations import pods
10+
from .mutations import pods as pod_mutations
1011

1112

1213
def get_gpus() -> dict:
@@ -28,6 +29,13 @@ def get_gpu(gpu_id : str):
2829
cleaned_return = raw_return["data"]["gpuTypes"][0]
2930
return cleaned_return
3031

32+
def get_pods() -> dict:
    '''
    Get all pods

    :return: the pods listed under "myself" in the GraphQL API response
    '''
    response = run_graphql_query(pod_queries.QUERY_POD)
    return response["data"]["myself"]["pods"]
3139

3240
def create_pod(name : str, image_name : str, gpu_type_id : str, cloud_type : str="ALL",
3341
data_center_id : Optional[str]=None, country_code:Optional[str]=None,
@@ -58,7 +66,7 @@ def create_pod(name : str, image_name : str, gpu_type_id : str, cloud_type : str
5866
'''
5967

6068
raw_response = run_graphql_query(
61-
pods.generate_pod_deployment_mutation(
69+
pod_mutations.generate_pod_deployment_mutation(
6270
name, image_name, gpu_type_id, cloud_type, data_center_id, country_code, gpu_count,
6371
volume_in_gb, container_disk_in_gb, min_vcpu_count, min_memory_in_gb, docker_args,
6472
ports, volume_mount_path, env)
@@ -80,7 +88,7 @@ def stop_pod(pod_id: str):
8088
>>> runpod.stop_pod(pod_id)
8189
'''
8290
raw_response = run_graphql_query(
83-
pods.generate_pod_stop_mutation(pod_id)
91+
pod_mutations.generate_pod_stop_mutation(pod_id)
8492
)
8593

8694
cleaned_response = raw_response["data"]["podStop"]
@@ -101,7 +109,7 @@ def resume_pod(pod_id: str, gpu_count: int):
101109
>>> runpod.resume_pod(pod_id)
102110
'''
103111
raw_response = run_graphql_query(
104-
pods.generate_pod_resume_mutation(pod_id, gpu_count)
112+
pod_mutations.generate_pod_resume_mutation(pod_id, gpu_count)
105113
)
106114

107115
cleaned_response = raw_response["data"]["podResume"]
@@ -120,5 +128,5 @@ def terminate_pod(pod_id: str):
120128
>>> runpod.terminate_pod(pod_id)
121129
'''
122130
run_graphql_query(
123-
pods.generate_pod_terminate_mutation(pod_id)
131+
pod_mutations.generate_pod_terminate_mutation(pod_id)
124132
)

runpod/api_wrapper/queries/pods.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
"""
2+
RunPod | API Wrapper | Queries | GPUs
3+
"""
4+
5+
QUERY_POD = """
6+
query myPods {{
7+
myself {{
8+
pods {{
9+
id
10+
containerDiskInGb
11+
costPerHr
12+
desiredStatus
13+
dockerArgs
14+
dockerId
15+
env
16+
gpuCount
17+
imageName
18+
lastStatusChange
19+
machineId
20+
memoryInGb
21+
name
22+
podType
23+
port
24+
ports
25+
uptimeSeconds
26+
vcpuCount
27+
volumeInGb
28+
volumeMountPath
29+
machine {{
30+
gpuDisplayName
31+
}}
32+
}}
33+
}}
34+
}}
35+
"""

tests/test_api_wrapper/test_ctl_commands.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,3 +122,45 @@ def test_terminate_pod(self):
122122
}
123123

124124
self.assertIsNone(ctl_commands.terminate_pod(pod_id="POD_ID"))
125+
126+
def test_get_pods(self):
127+
'''
128+
Tests get_pods
129+
'''
130+
with patch("runpod.api_wrapper.graphql.requests.post") as patch_request:
131+
patch_request.return_value.json.return_value = {
132+
"data": {
133+
"myself": {
134+
"pods": [
135+
{
136+
"id": "POD_ID",
137+
"containerDiskInGb": 5,
138+
"costPerHr": 0.34,
139+
"desiredStatus": "RUNNING",
140+
"dockerArgs": None,
141+
"dockerId": None,
142+
"env": [],
143+
"gpuCount": 1,
144+
"imageName": "runpod/pytorch:2.0.1-py3.10-cuda11.8.0-devel",
145+
"lastStatusChange": "Rented by User: Tue Aug 15 2023",
146+
"machineId": "MACHINE_ID",
147+
"memoryInGb": 83,
148+
"name": "POD_NAME",
149+
"podType": "RESERVED",
150+
"port": None,
151+
"ports": "80/http",
152+
"uptimeSeconds": 0,
153+
"vcpuCount": 21,
154+
"volumeInGb": 200,
155+
"volumeMountPath": "/workspace",
156+
"machine": { "gpuDisplayName": "RTX 3090" }
157+
}
158+
]
159+
}
160+
}
161+
}
162+
163+
pods = ctl_commands.get_pods()
164+
165+
self.assertEqual(len(pods), 1)
166+
self.assertEqual(pods[0]["id"], "POD_ID")

0 commit comments

Comments
 (0)