Commit 0351887d authored by Devanshu Agrawal's avatar Devanshu Agrawal
Browse files

Add loss over depth plots.

parent f2bffc72
Loading
Loading
Loading
Loading
+68 −20
Original line number Diff line number Diff line
@@ -30,8 +30,8 @@ def heat_maps(dataset, depths, widths, save_csv=False):
		plt.figure()
		plt.imshow(ims[q], cmap="YlGnBu_r", origin="lower")
		plt.colorbar()
		plt.xticks(np.arange(len(widths))+1, widths)
		plt.yticks(np.arange(len(depths))+1, depths)
		plt.xticks(np.arange(len(widths)), list(map(lambda w: "$\\infty$" if w == 0 else w, widths)))
		plt.yticks(np.arange(len(depths)), depths)
		plt.xlabel("Width", fontsize=12)
		plt.ylabel("Depth", fontsize=12)
		plt.title(title, fontsize=16)
@@ -51,7 +51,7 @@ def heat_maps(dataset, depths, widths, save_csv=False):
			df.to_csv(os.path.join(dir, q+".csv"), index=False)


def learning_curves(dataset, depth, width):
def learning_curves(dataset, depth, width, omit_iters=0):
	with open( os.path.join("../results", dataset, "depths1_{}_widths{}.json".format(depth, width)), "r") as fp:
		results = json.load(fp)
		dir = os.path.join("figs", dataset, "lc", "d{}w{}".format(depth, width))
@@ -59,8 +59,8 @@ def learning_curves(dataset, depth, width):
	# loss
	iters = np.arange(1, len(results["train"]["loss"])+1)
	plt.figure()
	plt.plot(iters, results["train"]["loss"], color="red", label="Before update")
	plt.plot(iters, results["train"]["loss_new"], color="blue", label="After update")
	plt.plot(iters[omit_iters:], results["train"]["loss"][omit_iters:], color="red", label="Before update")
	plt.plot(iters[omit_iters:], results["train"]["loss_new"][omit_iters:], color="blue", label="After update")
	plt.legend(loc="upper right", bbox_to_anchor=(1, 1), fancybox=True, fontsize=10)
	plt.xlabel("Iteration", fontsize=12)
	plt.title("Loss", fontsize=16)
@@ -70,7 +70,7 @@ def learning_curves(dataset, depth, width):
	plt.close()
	# delta loss
	plt.figure()
	plt.plot(iters, results["train"]["delta_loss"], color="black")
	plt.plot(iters[omit_iters:], results["train"]["delta_loss"][omit_iters:], color="black")
	plt.xlabel("Iteration", fontsize=12)
	plt.ylabel("(%)", fontsize=12)
	plt.title("Change in loss", fontsize=16)
@@ -82,7 +82,7 @@ def learning_curves(dataset, depth, width):
	iters = np.arange(len(results["train"]["loss"])+1)
	plt.figure()
	for (param, color) in zip(["v_b", "v_w", "v_n"], ["red", "green", "blue"]):
		plt.plot(iters, results["train"][param], color=color, label="${}$".format(param))
		plt.plot(iters[omit_iters:], results["train"][param][omit_iters:], color=color, label="${}$".format(param))
	plt.legend(loc="upper right", bbox_to_anchor=(1, 1), fancybox=True, fontsize=10)
	plt.xlabel("Iteration", fontsize=12)
	plt.title("Hyperparameters", fontsize=16)
@@ -92,14 +92,38 @@ def learning_curves(dataset, depth, width):
	plt.close()


def tex(dataset, dataset_name, lc_depths, lc_widths):
def loss_over_depth(dataset, depths, width):
	"""Plot test loss as a function of network depth for a fixed width.

	For each depth d, reads the cached results JSON at
	../results/<dataset>/depths1_<d>_widths<width>.json, collects the
	test loss, and saves a depth-vs-loss line plot to
	figs/<dataset>/lod/w<width>.png.

	Parameters:
		dataset: name of the dataset subdirectory under ../results.
		depths: iterable of depths to plot on the x-axis.
		width: network width; 0 is the "infinite width" sentinel and is
			rendered as $\\infty$ in the title, matching the convention
			used by the other plotting helpers in this file.
	"""
	losses = []
	for d in depths:
		with open( os.path.join("../results", dataset, "depths1_{}_widths{}.json".format(d, width)), "r") as fp:
			results = json.load(fp)
		losses.append( results["test"]["loss"] )
	plt.figure()
	plt.plot(depths, losses, color="black")
	plt.xlabel("Depth", fontsize=12)
	plt.ylabel("Loss", fontsize=12)
	if width == 0:
		plt.title("Width $\\infty$", fontsize=16)
	else:
		plt.title("Width {}".format(width), fontsize=16)
	plt.tight_layout()
	dir = os.path.join("figs", dataset, "lod")
	os.makedirs(dir, exist_ok=True)
	filename = os.path.join(dir, "w{}.png".format(width))
	# BUG FIX: the keyword is bbox_inches, not box_inches. matplotlib
	# silently ignored the misspelled kwarg, so the figure was saved
	# without the intended tight bounding box.
	plt.savefig(filename, bbox_inches="tight")
	plt.close()


def tex(dataset, dataset_name, lc_depths, lc_widths, lod_widths=None):
	lc_sections = ""
	for d in lc_depths:
		for w in lc_widths:
			w_replace = "$\\infty$" if w == 0 else str(int(w))
			with open("templates/lc.tex", "r") as fp:
				lcs = fp.read()
			lcs = lcs.replace("%depth%", str(int(d)))
			lcs = lcs.replace("%width%", str(int(w)))
			lcs = lcs.replace("%width%", w_replace)
			lcs = lcs.replace("w$\\infty$", "w0")
			lc_sections = lc_sections + lcs
	with open("templates/main.tex", "r") as fp:
		main_text = fp.read()
@@ -111,25 +135,49 @@ def tex(dataset, dataset_name, lc_depths, lc_widths):
	for (k, v) in args.items():
		k = "%{}%".format(k)
		main_text = main_text.replace(k, str(v))
	if lod_widths is not None:
		lods = ""
		for i in range(2):
			lods = lods + "\\newpage\n\\begin{center}\n\\begin{tabular}{cc}\n"
			for j in range(6):
				w = lod_widths[6*i+j]
				ending = "& %" if j%2 == 0 else "\\\\ %"
				lods = lods + "\\includegraphics[width=0.5\\linewidth]{figs/"+dataset+"/lod/w"+str(int(w))+".png} "+ending+"\n"
			lods = lods + "\\end{tabular}\n\\end{center}\n\n"
		main_text = main_text.replace("%lodplots%", lods)
	with open(dataset+".tex", "w") as fp:
		fp.write(main_text)


if __name__ == "__main__":
	# Driver: regenerate every figure and the LaTeX report for each dataset.
	# NOTE(review): this span was corrupted by a diff rendering that
	# interleaved pre-change lines (stale single-dataset assignments and
	# commented-out loops) with the new code; this is the reconstructed
	# post-commit version.
	datasets = ["boston", "iris", "rings"]
	dataset_names = ["Boston housing prices", "Iris", "Rings"]

	for (dataset, dataset_name) in zip(datasets, dataset_names):
		hm_depths = list(range(1, 13))  # + [10*i for i in range(1, 10)]
		# NOTE(review): both branches currently assign the same widths;
		# kept as-is so per-dataset widths can diverge later.
		if dataset == "boston":
			hm_widths = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 0]
		else:
			hm_widths = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 0]

		lc_depths = [1, 6, 12]  # [1, 10, 50, 90]
		# boston learning curves are plotted in full; the other datasets
		# drop the first 10 iterations to de-clutter the early transient.
		if dataset == "boston":
			lc_widths = [1, 32, 1024, 0]
			omit_iters = 0
		else:
			lc_widths = [1, 32, 1024, 0]
			omit_iters = 10

		heat_maps(dataset, hm_depths, hm_widths, save_csv=True)

		for d in lc_depths:
			for w in lc_widths:
				learning_curves(dataset, d, w, omit_iters=omit_iters)

		for w in hm_widths:
			loss_over_depth(dataset, hm_depths, w)

		tex(dataset, dataset_name, lc_depths, lc_widths, lod_widths=hm_widths)

	print("Done!")
 No newline at end of file
+2 −0
Original line number Diff line number Diff line
@@ -51,6 +51,8 @@ The ``Change in loss'' learning curve gives the difference between the red and b
\end{tabular}
\end{center}

%lodplots%

%lcsections%

\end{document}
 No newline at end of file