Merge pull request #161 from bradmwilliams/examples
More examples
bradmwilliams authored Mar 27, 2024
2 parents ca1de74 + 430c2e6 commit 3ca6f8e
Showing 3 changed files with 96 additions and 0 deletions.
17 changes: 17 additions & 0 deletions examples/etcd_status.py
@@ -0,0 +1,17 @@
#!/usr/bin/python

import openshift_client as oc

if __name__ == '__main__':
    options = {
        'as': 'system:admin',
    }

    with oc.client_host():
        with oc.timeout(60 * 5):
            with oc.options(options):
                with oc.project("openshift-etcd"):
                    pods = oc.selector("pods", labels={'app': 'etcd'}).objects()
                    print(f'Found: {len(pods)} pods')
                    result = pods[0].execute(cmd_to_exec=['etcdctl', 'endpoint', 'status', '--cluster', '-w', 'table'])
                    print(f'Result:\n{result.out()}')
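
etcd_status.py selects the etcd pods in the openshift-etcd project while impersonating system:admin (the 'as' option), then runs etcdctl endpoint status --cluster -w table in the first pod and prints the resulting table. Assuming oc is on the PATH of the client host and the kubeconfig user may impersonate system:admin, something like python examples/etcd_status.py should be all that is needed to run it.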
40 changes: 40 additions & 0 deletions examples/login.py
@@ -0,0 +1,40 @@
#!/usr/bin/python

import argparse
import traceback

import openshift_client as oc
from openshift_client import OpenShiftPythonException, Context

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='OpenShift Client Login Example')
    parser.add_argument('-k', '--kubeconfig', help='The kubeconfig to create', required=True)
    parser.add_argument('-s', '--server', help='The API Server to communicate with', required=True)
    parser.add_argument('-t', '--token', help='The login token', required=True)
    args = vars(parser.parse_args())

    my_context = Context()
    my_context.token = args["token"]
    my_context.api_server = args["server"]
    my_context.kubeconfig_path = args["kubeconfig"]

    with oc.timeout(60 * 30), oc.tracking() as t, my_context:
        if oc.get_config_context() is None:
            print(f'Current context not set! Logging into API server: {my_context.api_server}\n')
            try:
                oc.invoke('login')
            except OpenShiftPythonException:
                print('error occurred logging into API Server')
                traceback.print_exc()
                print(f'Tracking:\n{t.get_result().as_json(redact_streams=False)}\n\n')
                exit(1)

        print(f'Current context: {oc.get_config_context()}')

        try:
            pods = oc.selector('pods').objects()
            print(f'Found: {len(pods)} pods')
        except OpenShiftPythonException:
            print('Error occurred getting pods')
            traceback.print_exc()
            print(f'Tracking:\n{t.get_result().as_json(redact_streams=False)}\n\n')
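
login.py builds an explicit Context from its command-line arguments rather than relying on an existing login: if no config context is currently set, it invokes oc login, relying on the surrounding Context to supply the token, API server, and kubeconfig path, and it dumps the oc.tracking() result as JSON whenever a step fails so the underlying oc invocations can be inspected. Assuming the target API server is reachable, an invocation might look like python examples/login.py -k /tmp/kubeconfig -s https://api.<cluster-domain>:6443 -t <token> (the path, URL, and token here are placeholders).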
39 changes: 39 additions & 0 deletions examples/quotas.py
@@ -0,0 +1,39 @@
#!/usr/bin/python

import openshift_client as oc

if __name__ == '__main__':
    with oc.client_host():
        with oc.timeout(60 * 5):
            with oc.project('openshift-client-python'):
                resource_quotas = oc.selector('resourcequotas').objects()
                print(f'Found: {len(resource_quotas)} ResourceQuotas')

                for resource_quota in resource_quotas:
                    print(f'Processing ResourceQuota: {resource_quota.name()}')
                    for key in resource_quota.model.spec.hard:
                        print(f' - {key}: {resource_quota.model.spec.hard[key]}')

                limit_ranges = oc.selector('limitranges').objects()
                print(f'\nFound: {len(limit_ranges)} LimitRanges')

                for limit_range in limit_ranges:
                    print(f'Processing LimitRange: {limit_range.name()}')
                    for limit in limit_range.model.spec.limits:
                        print(f' Type: {limit.type}')
                        print(f' Default CPU Limit: {limit.default.cpu}')
                        print(f' Default Memory Limit: {limit.default.memory}')
                        print(f' Default CPU Request: {limit.defaultRequest.cpu}')
                        print(f' Default Memory Request: {limit.defaultRequest.memory}')

                pods = oc.selector('pods').objects()
                print(f'\nFound: {len(pods)} Pods')

                for pod in pods:
                    print(f'Processing Pod: {pod.name()}')
                    for container in pod.model.spec.containers:
                        print(f' Processing Container: {container.name}')
                        print(f' CPU Limit: {container.resources.limits.cpu}')
                        print(f' CPU Request: {container.resources.requests.cpu}')
                        print(f' Memory Limit: {container.resources.limits.memory}')
                        print(f' Memory Request: {container.resources.requests.memory}')
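
quotas.py reports on resource constraints in the openshift-client-python project: the hard limits of each ResourceQuota, the default limits and requests of each LimitRange, and the per-container limits and requests of every pod, all read through each object's .model view of the underlying resource. Assuming a project named openshift-client-python exists and is readable with the active kubeconfig, python examples/quotas.py prints the summary to stdout.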
