Skip to content

Commit

Permalink
Fix multi-gpu seeds (#162)
Browse files · Browse the repository at this point in the history
Co-authored-by: Costa Huang <[email protected]>
Loading branch information…
vwxyzjn and Costa Huang authored May 24, 2022
1 parent 86f5e82 commit 47b9f62
Showing 1 changed file with 16 additions and 1 deletion.
17 changes: 16 additions & 1 deletion rl_games/torch_runner.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import time
import numpy as np
import random
import copy
Expand Down Expand Up @@ -55,20 +56,34 @@ def reset(self):

def load_config(self, params):
self.seed = params.get('seed', None)
if self.seed is None:
self.seed = int(time.time())

print("params['config'].get('multi_gpu', False)", params['config'].get('multi_gpu', False))
if params["config"].get('multi_gpu', False):
import horovod.torch as hvd

hvd.init()
self.seed += hvd.rank()
print(f"self.seed = {self.seed}")

self.algo_params = params['algo']
self.algo_name = self.algo_params['name']
self.exp_config = None

if self.seed:

torch.manual_seed(self.seed)
torch.cuda.manual_seed_all(self.seed)
np.random.seed(self.seed)
random.seed(self.seed)

# deal with environment specific seed if applicable
if 'env_config' in params['config']:
if not 'seed' in params['config']['env_config']:
params['config']['env_config']['seed'] = self.seed
else:
if params["config"].get('multi_gpu', False):
params['config']['env_config']['seed'] += hvd.rank()

config = params['config']
config['reward_shaper'] = tr_helpers.DefaultRewardsShaper(**config['reward_shaper'])
Expand Down

0 comments on commit 47b9f62

Please sign in to comment.