Skip to content

Commit 7ef464a

Browse files
authored
Fixed Spark test to run within Framework Tests
(1) Uninstall the packages and run the janitor in module teardown, because other framework tests run after the Spark tests on the same cluster. (2) Some changes related to the new HDFS: new endpoint, different number of tasks, longer timeout.
2 parents 07180d0 + 4666d9c commit 7ef464a

File tree

1 file changed

+18
-3
lines changed

1 file changed

+18
-3
lines changed

tests/test.py

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,16 @@ def setup_module(module):
2525
_install_spark()
2626

2727

28+
def teardown_module(module):
    """Remove Spark and its HDFS dependency after this module's tests.

    Other framework tests run on the same cluster afterwards, so the
    packages are uninstalled and the leftover HDFS role/principal/ZK
    state is cleaned up by the janitor.
    """
    for package in ('spark', 'hdfs'):
        shakedown.uninstall_package_and_wait(package)
    _run_janitor('hdfs')
32+
33+
2834
def _install_spark():
2935
options = {"hdfs":
3036
{"config-url":
31-
"http://hdfs.marathon.mesos:9000/v1/connection"}}
37+
"http://api.hdfs.marathon.l4lb.thisdcos.directory/v1/endpoints"}}
3238

3339
if os.environ.get('SECURITY') == 'strict':
3440
options['service'] = {"user": "nobody",
@@ -51,16 +57,25 @@ def _require_package(pkg_name):
5157
installed_pkgs = dcos.package.installed_packages(pkg_manager, None, None, False)
5258
if not any(pkg['name'] == pkg_name for pkg in installed_pkgs):
5359
shakedown.install_package(pkg_name, wait_for_completion=True)
54-
shakedown.wait_for(_is_hdfs_ready, ignore_exceptions=False, timeout_seconds=600)
60+
shakedown.wait_for(_is_hdfs_ready, ignore_exceptions=False, timeout_seconds=900)
5561

5662

57-
DEFAULT_HDFS_TASK_COUNT=8
63+
# Expected number of HDFS tasks for a healthy default deployment.
DEFAULT_HDFS_TASK_COUNT = 10


def _is_hdfs_ready(expected_tasks=DEFAULT_HDFS_TASK_COUNT):
    """Return True once at least *expected_tasks* HDFS tasks are running."""
    states = (task['state'] for task in shakedown.get_service_tasks('hdfs'))
    running_count = sum(1 for state in states if state == 'TASK_RUNNING')
    return running_count >= expected_tasks
6268

6369

70+
def _run_janitor(service_name):
    """Run the DC/OS janitor on the master to purge a service's leftovers.

    Cleans the Mesos role, principal, and ZooKeeper node associated with
    *service_name*, authenticating with the current DC/OS ACS token.
    """
    token = shakedown.run_dcos_command(
        'config show core.dcos_acs_token')[0].strip()
    command = (
        'docker run mesosphere/janitor /janitor.py '
        '-r {svc}-role -p {svc}-principal -z dcos-service-{svc} --auth_token={auth}'
    ).format(svc=service_name, auth=token)
    shakedown.run_command_on_master(command)
77+
78+
6479
def test_teragen():
6580
jar_url = "https://downloads.mesosphere.io/spark/examples/spark-terasort-1.0-jar-with-dependencies_2.11.jar"
6681
_run_tests(jar_url,

0 commit comments

Comments
 (0)