Commit 21d7b7bf authored by Devanshu Agrawal

Add no_grad context during prediction.

parent 348d67ab
+6 −4
@@ -69,6 +69,7 @@ for i in range(args.iters):
 	print("iter {} loss: {:.3f}".format(i, loss.item()))
 
 # compute loss of final iteration
-Ks = model(X, num_samples=args.train_samples)
-loss = -model.log_likelihood(Ks, Y)
+with torch.no_grad():
+	Ks = model(X, num_samples=args.train_samples)
+	loss = -model.log_likelihood(Ks, Y)
 results["train"]["loss"].append(loss.item())
@@ -79,6 +80,7 @@ results["train"]["time"] = time.time()-time_0
 
 # get final values with higher number of samples
 time_0 = time.time()
-Ks = model(X, num_samples=args.test_samples)
-loss = -model.log_likelihood(Ks, Y)
+with torch.no_grad():
+	Ks = model(X, num_samples=args.test_samples)
+	loss = -model.log_likelihood(Ks, Y)
 results["test"] = {}