Commit 78dd837

run pytorch test

zhaoguochun1995 committed Sep 19, 2024
1 parent 8fa2653 commit 78dd837
Showing 4 changed files with 86 additions and 0 deletions.
3 changes: 3 additions & 0 deletions ditorch/__init__.py
@@ -28,4 +28,7 @@
except Exception as e:  # noqa: F841
    pass


from ditorch import common_adapter # noqa: F401,E402

print(f"ditorch.framework: {framework}")
41 changes: 41 additions & 0 deletions ditorch/common_adapter.py
@@ -0,0 +1,41 @@
import torch
import torch.testing._internal.common_utils as common_utils


class CudaNonDefaultStream:
    def __enter__(self):
        # Before starting a CUDA test, save the currently active stream on
        # every CUDA device and switch each device to a fresh non-default
        # stream, so that CUDA tests do not use the default stream by mistake.
        beforeDevice = torch.cuda.current_device()
        self.beforeStreams = []
        for d in range(torch.cuda.device_count()):
            self.beforeStreams.append(torch.cuda.current_stream(d))
            deviceStream = torch.cuda.Stream(device=d)
            self.beforeStreams[-1].synchronize()
            # Upstream PyTorch calls the private binding directly:
            #   torch._C._cuda_setStream(stream_id=deviceStream.stream_id,
            #                            device_index=deviceStream.device_index,
            #                            device_type=deviceStream.device_type)
            # Use the public API instead so that vendor backends can hook it.
            torch.cuda.set_stream(deviceStream)
        # torch._C._cuda_setDevice(beforeDevice)
        torch.cuda.set_device(beforeDevice)

    def __exit__(self, exec_type, exec_value, traceback):
        # After the CUDA test completes, restore the previously active
        # stream on every CUDA device.
        beforeDevice = torch.cuda.current_device()
        for d in range(torch.cuda.device_count()):
            # torch._C._cuda_setStream(stream_id=self.beforeStreams[d].stream_id,
            #                          device_index=self.beforeStreams[d].device_index,
            #                          device_type=self.beforeStreams[d].device_type)
            torch.cuda.set_stream(self.beforeStreams[d])
        # torch._C._cuda_setDevice(beforeDevice)
        torch.cuda.set_device(beforeDevice)


common_utils.CudaNonDefaultStream = CudaNonDefaultStream
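
For reference, a minimal usage sketch of the patched context manager, in the same shape PyTorch's own test suite uses it (assumption: a device is visible through torch.cuda; the tensor work inside the block is illustrative only):

import ditorch  # noqa: F401  # installs the class above into common_utils
import torch
import torch.testing._internal.common_utils as common_utils

with common_utils.CudaNonDefaultStream():
    # Everything in this block runs on freshly created non-default streams.
    x = torch.ones(4, device="cuda")
    y = x * 2
    torch.cuda.synchronize()
print(y.cpu())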
35 changes: 35 additions & 0 deletions ditorch/test/test_torch_testcase.py
@@ -0,0 +1,35 @@
import os
import sys


def run_test(file_path):
    # Copy the upstream PyTorch test script into temp/ with "import ditorch"
    # prepended, so the ditorch adapters are loaded before torch is used.
    os.makedirs("temp", exist_ok=True)
    with open(file_path, "rt") as torch_test_source_script_file:
        content = torch_test_source_script_file.read()
    content = "import ditorch\n" + content
    new_file_name = "temp/" + file_path.split("/")[-1]
    with open(new_file_name, "w") as new_file:
        new_file.write(content)
    print(f"test {file_path} over")


TORCH_TEST_SCRIPT_FILE = [
    "test_ops.py",
]


def main():
    pytorch_dir = os.environ.get("PYTORCH_SOURCE_DIR")
    if not pytorch_dir:
        print("PYTORCH_SOURCE_DIR not set")
        return -1
    if not os.path.isdir(pytorch_dir):
        print(f"{pytorch_dir} does not exist")
        return -1
    for file_path in TORCH_TEST_SCRIPT_FILE:
        full_path = pytorch_dir + "/test/" + file_path
        run_test(full_path)
    return 0


if __name__ == "__main__":
    sys.exit(main())
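
A hedged end-to-end sketch of driving this script (assumptions: PYTORCH_SOURCE_DIR points at a PyTorch checkout, and running the rewritten copy is the intended next step; the commit itself only writes the copy under temp/):

import os
import subprocess
import sys

# Placeholder path to a PyTorch source checkout.
env = dict(os.environ, PYTORCH_SOURCE_DIR="/path/to/pytorch")
subprocess.run([sys.executable, "ditorch/test/test_torch_testcase.py"], env=env, check=True)

# The rewritten copy starts with "import ditorch" and can then be run directly.
subprocess.run([sys.executable, "temp/test_ops.py", "-v"], check=False)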
7 changes: 7 additions & 0 deletions ditorch/torch_npu_adapter.py
@@ -15,3 +15,10 @@ def current_stream(device=None):


torch.cuda.current_stream = current_stream


def get_device_capability(device=None):
    # Report a fixed compute capability so capability-gated checks in the
    # PyTorch test suite take a deterministic path on this backend.
    return (7, 5)


torch.cuda.get_device_capability = get_device_capability
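
A small illustration of the patched query (assumption: ditorch selects the torch_npu backend at import time):

import ditorch  # noqa: F401  # applies the torch.cuda.* patches above
import torch

major, minor = torch.cuda.get_device_capability()
assert (major, minor) == (7, 5)
print(f"reported capability: {major}.{minor}")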
