Commit f931b8a3 authored by Raul Puri's avatar Raul Puri
Browse files

Update initialize.py

parent 6106127c
Loading
Loading
Loading
Loading
+4 −1
Original line number Diff line number Diff line
@@ -31,7 +31,10 @@ from megatron.global_vars import set_global_variables
def initialize_megatron(extra_args_provider=None, args_defaults={},
                        ignore_unknown_args=False, allow_no_cuda=False):
    """Set global variables, initialize distributed, and
    set autoresume and random seeds."""
    set autoresume and random seeds.
    `allow_no_cuda` should only be set when using Megatron for CPU-only
    data processing. In general, do not set this argument unless you know
    what you are doing."""
    if not allow_no_cuda:
        # Make sure cuda is available.
        assert torch.cuda.is_available(), 'Megatron requires CUDA.'