diff --git a/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py b/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
index 55ea99a6540d..b855332f96c4 100644
--- a/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
+++ b/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
@@ -64,7 +64,7 @@ def setUpClass(cls):
         builder = builder.config(
             "spark.driver.resource.gpu.discoveryScript", cls.gpu_discovery_script_file_name
         )
-        cls.spark = builder.remote("local-cluster[2,2,1024]").getOrCreate()
+        cls.spark = builder.remote("local-cluster[2,2,512]").getOrCreate()

     @classmethod
     def tearDownClass(cls):
@@ -126,7 +126,7 @@ def setUpClass(cls):
         builder = builder.config(
             "spark.worker.resource.gpu.discoveryScript", cls.gpu_discovery_script_file_name
         )
-        cls.spark = builder.remote("local-cluster[2,2,1024]").getOrCreate()
+        cls.spark = builder.remote("local-cluster[2,2,512]").getOrCreate()

     @classmethod
     def tearDownClass(cls):
diff --git a/python/pyspark/ml/torch/tests/test_distributor.py b/python/pyspark/ml/torch/tests/test_distributor.py
index ebd859031bda..9fd0b4cba945 100644
--- a/python/pyspark/ml/torch/tests/test_distributor.py
+++ b/python/pyspark/ml/torch/tests/test_distributor.py
@@ -148,6 +148,8 @@ def get_local_mode_conf():
     return {
         "spark.test.home": SPARK_HOME,
         "spark.driver.resource.gpu.amount": "3",
+        "spark.driver.memory": "512M",
+        "spark.executor.memory": "512M",
     }


@@ -158,6 +160,8 @@ def get_distributed_mode_conf():
         "spark.task.cpus": "2",
         "spark.task.resource.gpu.amount": "1",
         "spark.executor.resource.gpu.amount": "1",
+        "spark.driver.memory": "512M",
+        "spark.executor.memory": "512M",
     }


@@ -412,7 +416,7 @@ def setUpClass(cls):
             "spark.driver.resource.gpu.discoveryScript", cls.gpu_discovery_script_file_name
         )

-        sc = SparkContext("local-cluster[2,2,1024]", cls.__name__, conf=conf)
+        sc = SparkContext("local-cluster[2,2,512]", cls.__name__, conf=conf)
         cls.spark = SparkSession(sc)

     @classmethod
@@ -502,7 +506,7 @@ def setUpClass(cls):
             "spark.worker.resource.gpu.discoveryScript", cls.gpu_discovery_script_file_name
         )

-        sc = SparkContext("local-cluster[2,2,1024]", cls.__name__, conf=conf)
+        sc = SparkContext("local-cluster[2,2,512]", cls.__name__, conf=conf)
         cls.spark = SparkSession(sc)

     @classmethod