remove pytest warning and update setup.py #128

Merged: 1 commit, Jun 21, 2024
3 changes: 2 additions & 1 deletion .flake8
@@ -1,3 +1,4 @@
[flake8]
ignore = E501,W503,E203
exclude = .git,__pycache__,venv
disable = W391
exclude = .git,__pycache__,venv,.venv
2 changes: 1 addition & 1 deletion README.md
@@ -53,7 +53,7 @@ Install with:

If you have cloned the git and want to develop locally, replace last step with:

python -m pip install --editable .
python -m pip install --editable .[dev]

Running unit tests
------------------
4 changes: 3 additions & 1 deletion core/opl/cluster_read.py
@@ -456,7 +456,9 @@ def get_source(self, environment, path):


class RequestedInfo:
def __init__(self, config, start=None, end=None, args=argparse.Namespace(), sd=None):
def __init__(
self, config, start=None, end=None, args=argparse.Namespace(), sd=None
):
"""
"config" is input for config_stuff function
"start" and "end" are datetimes needed if config file contains some
12 changes: 6 additions & 6 deletions core/opl/data.py
@@ -217,13 +217,13 @@ def data_stats(data):
"max": max(data),
"sum": sum(data),
"mean": statistics.mean(data),
"non_zero_mean": statistics.mean(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_mean": (
statistics.mean(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"median": statistics.median(data),
"non_zero_median": statistics.median(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_median": (
statistics.median(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"stdev": statistics.stdev(data) if len(data) > 1 else 0.0,
"range": max(data) - min(data),
"percentile25": q25,
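For context, the non_zero_* keys guard against empty input: statistics.mean and statistics.median raise StatisticsError on an empty sequence, so the conditional falls back to 0.0. A minimal, self-contained sketch of that pattern (the helper name and reduced key set are illustrative, not the full data_stats()):

import statistics

def non_zero_stats(data):
    # Illustrative helper, not the project's data_stats(); it keeps only the
    # keys relevant to the hunk above.
    non_zero_data = [d for d in data if d != 0]
    return {
        "mean": statistics.mean(data),
        "non_zero_mean": (
            statistics.mean(non_zero_data) if len(non_zero_data) > 0 else 0.0
        ),
        "non_zero_median": (
            statistics.median(non_zero_data) if len(non_zero_data) > 0 else 0.0
        ),
    }

print(non_zero_stats([0, 0, 3, 5]))  # mean 2.0, non_zero_mean 4.0, non_zero_median 4.0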
14 changes: 14 additions & 0 deletions core/opl/date.py
@@ -30,3 +30,17 @@ def my_fromisoformat(string):
out = datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S")
out = out.replace(tzinfo=string_tz)
return out


def get_now_str() -> str:
"""
return current datetime in UTC string format
"""
return datetime.datetime.now(datetime.timezone.utc).isoformat()


def get_now() -> datetime.datetime:
"""
return current datetime in UTC datetime format
"""
return datetime.datetime.now(datetime.timezone.utc)
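The two new helpers return an aware UTC datetime and its ISO-8601 string form. A short usage sketch, assuming the module is importable as opl.date (as the opl/generators/generic.py hunk below does):

import opl.date

now = opl.date.get_now()        # timezone-aware datetime in UTC
stamp = opl.date.get_now_str()  # e.g. "2024-06-21T12:34:56.789012+00:00"
assert now.tzinfo is not None
print(stamp)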
19 changes: 15 additions & 4 deletions core/opl/shovel.py
@@ -213,7 +213,9 @@ def upload(self):
elif self.args.test_end is None:
raise Exception("Test end is required to work with --horreum-upload")
elif self.args.test_job_matcher_label is None:
raise Exception("Test job matcher Horreum label name is required to work with --horreum-upload")
raise Exception(
"Test job matcher Horreum label name is required to work with --horreum-upload"
)

self.logger.debug(f"Loading file {self.args.horreum_data_file}")
with open(self.args.horreum_data_file, "r") as fd:
@@ -313,7 +315,9 @@ def result(self):
change_detected = True
break

print(f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}")
print(
f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}"
)
if change_detected:
sd.set("result", "FAIL")
else:
@@ -351,8 +355,15 @@ def set_args(parser, group_actions):
group.add_argument(
"--test-name-horreum", default="load-tests-result", help="Test Name"
)
group.add_argument("--test-job-matcher", default="jobName", help="Field name in JSON with unique enough value we use to detect if document is already in Horreum")
group.add_argument("--test-job-matcher-label", help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum")
group.add_argument(
"--test-job-matcher",
default="jobName",
help="Field name in JSON with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument(
"--test-job-matcher-label",
help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument("--test-owner", default="rhtap-perf-test-team")
group.add_argument("--test-access", default="PUBLIC")
group.add_argument("--test-start", help="time when the test started")
16 changes: 12 additions & 4 deletions core/opl/skelet.py
@@ -13,7 +13,9 @@ def setup_logger(app_name, stderr_log_lvl):
Create logger that logs to both stderr and log file but with different log levels
"""
# Remove all handlers from root logger if any
logging.basicConfig(level=logging.NOTSET, handlers=[]) # `force=True` was added in Python 3.8 :-(
logging.basicConfig(
level=logging.NOTSET, handlers=[]
) # `force=True` was added in Python 3.8 :-(
# Change root logger level from WARNING (default) to NOTSET in order for all messages to be delegated
logging.getLogger().setLevel(logging.NOTSET)

@@ -47,6 +49,7 @@ def setup_logger(app_name, stderr_log_lvl):

return logging.getLogger(app_name)


@contextmanager
def test_setup(parser, logger_name="root"):
parser.add_argument(
@@ -55,12 +58,14 @@ def test_setup(parser, logger_name="root"):
help='File where we maintain metadata, results, parameters and measurements for this test run (also use env variable STATUS_DATA_FILE, default to "/tmp/status-data.json")',
)
parser.add_argument(
"-v", "--verbose",
"-v",
"--verbose",
action="store_true",
help="Show verbose output",
)
parser.add_argument(
"-d", "--debug",
"-d",
"--debug",
action="store_true",
help="Show debug output",
)
@@ -113,8 +118,11 @@ def wrapper(*args, **kwargs):
raise # Reraise the exception after all retries are exhausted

attempt += 1
logging.debug(f"Retrying in {wait_seconds} seconds. Attempt {attempt}/{max_attempts} failed with: {e}")
logging.debug(
f"Retrying in {wait_seconds} seconds. Attempt {attempt}/{max_attempts} failed with: {e}"
)
time.sleep(wait_seconds)

return wrapper

return decorator
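Only the inner wrapper of the retry helper is visible in this hunk, so the factory's real name and exact exhaustion check are not shown; max_attempts and wait_seconds do appear in it. A self-contained sketch of the pattern it implements, with retry used as an assumed placeholder name:

import functools
import logging
import time

def retry(max_attempts=3, wait_seconds=1):
    # Placeholder factory; the real one lives outside the visible hunk.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    if attempt + 1 >= max_attempts:
                        raise  # reraise after all retries are exhausted
                    attempt += 1
                    logging.debug(
                        f"Retrying in {wait_seconds} seconds. Attempt {attempt}/{max_attempts} failed with: {e}"
                    )
                    time.sleep(wait_seconds)
        return wrapper
    return decorator

_calls = {"n": 0}

@retry(max_attempts=3, wait_seconds=0)
def sometimes_fails():
    _calls["n"] += 1
    if _calls["n"] < 2:
        raise RuntimeError("transient failure")
    return "ok"

print(sometimes_fails())  # fails once, is retried, then returns "ok"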
14 changes: 5 additions & 9 deletions core/opl/status_data.py
@@ -231,7 +231,7 @@ def clear(self):
"""
self._data = {
"name": None,
"started": get_now_str(),
"started": date.get_now_str(),
"ended": None,
"owner": None,
"result": None,
@@ -263,7 +263,9 @@ def save(self, filename=None):
if self._filename_mtime != current_mtime:
tmp = tempfile.mktemp()
self._save(tmp)
raise Exception(f"Status data file {self._filename} was modified since we loaded it so I do not want to overwrite it. Instead, saved to {tmp}")
raise Exception(
f"Status data file {self._filename} was modified since we loaded it so I do not want to overwrite it. Instead, saved to {tmp}"
)
else:
self._filename = filename

@@ -278,12 +280,6 @@ def _save(self, filename):
logging.debug(f"Saved status data to {filename}")


def get_now_str():
now = datetime.datetime.utcnow()
now = now.replace(tzinfo=datetime.timezone.utc)
return now.isoformat()


def doit_set(status_data, set_this):
for item in set_this:
if item == "":
@@ -296,7 +292,7 @@ def doit_set(status_data, set_this):
value = value[1:-1]

if value == "%NOW%":
value = get_now_str()
value = date.get_now_str()
else:
try:
value = int(value)
9 changes: 6 additions & 3 deletions core/opl/status_data_updater.py
@@ -39,11 +39,14 @@

def get_session():
session = requests.Session()
retry_adapter = requests.adapters.HTTPAdapter(max_retries=urllib3.Retry(total=None, connect=10, backoff_factor=1))
session.mount('https://', retry_adapter)
session.mount('http://', retry_adapter)
retry_adapter = requests.adapters.HTTPAdapter(
max_retries=urllib3.Retry(total=None, connect=10, backoff_factor=1)
)
session.mount("https://", retry_adapter)
session.mount("http://", retry_adapter)
return session


def _es_get_test(session, args, key, val, size=1, sort_by="started"):
url = f"{args.es_server}/{args.es_index}/_search"
headers = {
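Behaviour is unchanged by the reflow above: the adapter still retries failed connection attempts (up to 10, with exponential backoff) on both schemes. An illustrative, self-contained use of a session built the same way (the example URL and timeout are not from this PR):

import requests
import urllib3

session = requests.Session()
retry_adapter = requests.adapters.HTTPAdapter(
    max_retries=urllib3.Retry(total=None, connect=10, backoff_factor=1)
)
session.mount("https://", retry_adapter)
session.mount("http://", retry_adapter)

# Connection errors on this request are retried transparently by the adapter.
response = session.get("https://example.com/", timeout=30)
print(response.status_code)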
4 changes: 3 additions & 1 deletion opl/cluster_read.py
@@ -456,7 +456,9 @@ def get_source(self, environment, path):


class RequestedInfo:
def __init__(self, config, start=None, end=None, args=argparse.Namespace(), sd=None):
def __init__(
self, config, start=None, end=None, args=argparse.Namespace(), sd=None
):
"""
"config" is input for config_stuff function
"start" and "end" are datetimes needed if config file contains some
12 changes: 6 additions & 6 deletions opl/data.py
@@ -217,13 +217,13 @@ def data_stats(data):
"max": max(data),
"sum": sum(data),
"mean": statistics.mean(data),
"non_zero_mean": statistics.mean(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_mean": (
statistics.mean(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"median": statistics.median(data),
"non_zero_median": statistics.median(non_zero_data)
if len(non_zero_data) > 0
else 0.0,
"non_zero_median": (
statistics.median(non_zero_data) if len(non_zero_data) > 0 else 0.0
),
"stdev": statistics.stdev(data) if len(data) > 1 else 0.0,
"range": max(data) - min(data),
"percentile25": q25,
14 changes: 14 additions & 0 deletions opl/date.py
@@ -30,3 +30,17 @@ def my_fromisoformat(string):
out = datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S")
out = out.replace(tzinfo=string_tz)
return out


def get_now_str() -> str:
"""
return current datetime in UTC string format
"""
return datetime.datetime.now(datetime.timezone.utc).isoformat()


def get_now() -> datetime.datetime:
"""
return current datetime in UTC datetime format
"""
return datetime.datetime.now(datetime.timezone.utc)
15 changes: 5 additions & 10 deletions opl/generators/generic.py
@@ -11,6 +11,7 @@
import os

import opl.gen
import opl.date


class GenericGenerator:
@@ -107,33 +108,27 @@ def _get_mac(self):
return opl.gen.gen_mac()

def _get_now_iso(self):
return (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
) # noqa: E501
return opl.date.get_now_str() # noqa: E501

def _get_now_iso_z(self):
return self._get_now_iso().replace("+00:00", "Z")

def _get_now_rfc(self):
rfc_time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
rfc_time = opl.date.get_now_str()
rfc_time = (rfc_time.replace("T", " "))[: len(rfc_time) - 3]
return rfc_time

def _get_tommorow_iso(self):
return (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
+ datetime.timedelta(days=1)
opl.date.get_now() + datetime.timedelta(days=1)
).isoformat() # noqa: E501

def _get_tommorow_iso_z(self):
return self._get_tommorow_iso().replace("+00:00", "Z")

def _get_tommorow_rfc(self):
rfc_time_tommorow = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
+ datetime.timedelta(days=1)
opl.date.get_now() + datetime.timedelta(days=1)
).isoformat()
rfc_time_tommorow = (rfc_time_tommorow.replace("T", " "))[
: len(rfc_time_tommorow) - 3
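datetime.datetime.utcnow() is deprecated as of Python 3.12, which is likely the warning the PR title refers to; the generators now go through the timezone-aware opl.date helpers instead. A standard-library-only check that the two spellings are equivalent for these methods (this is not opl code itself):

import datetime

old_style = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
new_style = datetime.datetime.now(datetime.timezone.utc)

# Both are timezone-aware UTC datetimes, so isoformat() keeps the "+00:00" suffix.
assert old_style.tzinfo == new_style.tzinfo == datetime.timezone.utc
assert new_style - old_style < datetime.timedelta(seconds=1)
print(new_style.isoformat())  # e.g. 2024-06-21T12:34:56.789012+00:00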
2 changes: 1 addition & 1 deletion opl/generators/inventory_ingress.py
@@ -87,7 +87,7 @@ def _data(self):
packages_generated = self.packages_generated
else:
packages_generated = self.pg.generate(self.packages)

data = {
"inventory_id": self._get_uuid(),
"subscription_manager_id": self._get_uuid(),
19 changes: 15 additions & 4 deletions opl/shovel.py
@@ -213,7 +213,9 @@ def upload(self):
elif self.args.test_end is None:
raise Exception("Test end is required to work with --horreum-upload")
elif self.args.test_job_matcher_label is None:
raise Exception("Test job matcher Horreum label name is required to work with --horreum-upload")
raise Exception(
"Test job matcher Horreum label name is required to work with --horreum-upload"
)

self.logger.debug(f"Loading file {self.args.horreum_data_file}")
with open(self.args.horreum_data_file, "r") as fd:
@@ -313,7 +315,9 @@ def result(self):
change_detected = True
break

print(f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}")
print(
f"Writing result to {self.args.horreum_data_file}: {'FAIL' if change_detected else 'PASS'}"
)
if change_detected:
sd.set("result", "FAIL")
else:
@@ -351,8 +355,15 @@ def set_args(parser, group_actions):
group.add_argument(
"--test-name-horreum", default="load-tests-result", help="Test Name"
)
group.add_argument("--test-job-matcher", default="jobName", help="Field name in JSON with unique enough value we use to detect if document is already in Horreum")
group.add_argument("--test-job-matcher-label", help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum")
group.add_argument(
"--test-job-matcher",
default="jobName",
help="Field name in JSON with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument(
"--test-job-matcher-label",
help="Label name in Horreum with unique enough value we use to detect if document is already in Horreum",
)
group.add_argument("--test-owner", default="rhtap-perf-test-team")
group.add_argument("--test-access", default="PUBLIC")
group.add_argument("--test-start", help="time when the test started")