Commit f69e69ea authored by Brewer, Wes's avatar Brewer, Wes
Browse files

Merge branch 'fix-accounts' into 'support-both-submit-start-times'

Added the --accounts option to enable account tracking.

See merge request !74
parents e4586949 e51d9303
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -38,6 +38,7 @@ parser.add_argument('-w', '--workload', type=str, choices=choices, default=choic
# Valid UI layouts; note this rebinds `choices`, which earlier options
# (e.g. -w/--workload above) used for their own choice lists.
choices = ['layout1', 'layout2']
parser.add_argument('-x', '--partitions', nargs='+', default=None, help='List of machine configurations to use, e.g., -x setonix-cpu setonix-gpu')
parser.add_argument('--layout', type=str, choices=choices, default=choices[0], help='Layout of UI')
# --accounts is a boolean flag (store_true): enables per-account stat tracking.
# --accounts-json optionally seeds the tracker with stats saved by a previous
# run (see raps/accounts.py).
parser.add_argument('--accounts', action='store_true', help='Flag indicating if accounts should be tracked')
parser.add_argument('--accounts-json', type=str, help='Json of account stats generated in previous run. see raps/accounts.py')

args = parser.parse_args()
+17 −16
Original line number Diff line number Diff line
@@ -106,8 +106,6 @@ if args.replay:
    else:  # custom data loader
        print(*args.replay)
        jobs = td.load_data(args.replay)
        accounts = Accounts(jobs)
        accounts_dict = accounts.to_dict()
        td.save_snapshot(jobs, filename=DIR_NAME)

    # Set number of timesteps based on the last job running which we assume
@@ -123,12 +121,6 @@ if args.replay:
else:  # Synthetic jobs
    wl = Workload(config)
    jobs = getattr(wl, args.workload)(num_jobs=args.numjobs)
    job_accounts = Accounts(jobs)
    if args.accounts_json:
        loaded_accounts = Accounts.from_json_filename(args.accounts_json)
        accounts = Accounts.merge(loaded_accounts,job_accounts)
    else:
        accounts = job_accounts

    if args.verbose:
        for job_vector in jobs:
@@ -146,7 +138,15 @@ else: # Synthetic jobs
OPATH = OUTPUT_PATH / DIR_NAME
print("Output directory is: ", OPATH)
sc.opath = OPATH
#sc.accounts = accounts

if args.accounts:
    # Account tracking enabled: build per-account stats from this run's jobs
    # and, when --accounts-json supplies a snapshot from a previous run,
    # merge that snapshot in before attaching the result to the scheduler.
    job_accounts = Accounts(jobs)
    accounts = job_accounts
    if args.accounts_json:
        loaded_accounts = Accounts.from_json_filename(args.accounts_json)
        accounts = Accounts.merge(loaded_accounts, job_accounts)
    sc.accounts = accounts

if args.plot or args.output:
    try:
@@ -248,10 +248,11 @@ if args.output:
            with open(OPATH / 'stats.out', 'w') as f:
                json.dump(engine_stats, f, indent=4)
                json.dump(job_stats, f, indent=4)
        except:
        except TypeError:  # Is this the correct error code?
            write_dict_to_file(engine_stats, OPATH / 'stats.out')
            write_dict_to_file(job_stats, OPATH / 'stats.out')

        if args.accounts:
            try:
                with open(OPATH / 'accounts.json', 'w') as f:
                    json_string = json.dumps(sc.accounts.to_dict())
+2 −1
Original line number Diff line number Diff line
@@ -136,7 +136,8 @@ class Engine:
            self.running.remove(job)
            self.jobs_completed += 1
            job_stats = job.statistics()
            #self.accounts.update_account_statistics(job_stats)
            if self.accounts:
                self.accounts.update_account_statistics(job_stats)
            self.job_history_dict.append(job_stats.__dict__)
            # Free the nodes via the resource manager.
            self.resource_manager.free_nodes_from_job(job)