megatron/initialize.py (+8 −4)

@@ -29,9 +29,13 @@ from megatron.global_vars import set_global_variables
 def initialize_megatron(extra_args_provider=None, args_defaults={},
-                        ignore_unknown_args=False):
+                        ignore_unknown_args=False, allow_no_cuda=False):
     """Set global variables, initialize distributed, and
-    set autoresume and random seeds."""
+    set autoresume and random seeds.
+    `allow_no_cuda` should not be set unless using megatron for
+    cpu only data processing. In general this arg should not be set
+    unless you know what you are doing."""
+
+    if not allow_no_cuda:
+        # Make sure cuda is available.
+        assert torch.cuda.is_available(), 'Megatron requires CUDA.'