Commit 6aedb03

Updated the hparams script
1 parent 0aae85b commit 6aedb03

File tree

1 file changed: +38 −0 lines changed


examples/HParams.jl

Lines changed: 38 additions & 0 deletions
@@ -0,0 +1,38 @@
using TensorBoardLogger # import the TensorBoardLogger package
using Logging           # import the Logging package
using Random            # exports randn

# Run 10 experiments to see a plot
for j in 1:10
    logger = TBLogger("random_walks/run$j", tb_append)

    sigma = 0.05
    epochs = 50
    bias = (rand()*2 - 1) # create a random bias
    use_seed = false
    # Log a dummy loss metric as a biased random walk
    with_logger(logger) do
        x = 0.0
        for i in 1:epochs
            x += sigma * randn() + bias
            @info "scalar" loss = x
        end
    end

    # The hyperparameter config is a dictionary of parameter names to their
    # values. It supports numerical types, bools and strings. Non-bool
    # numerical types are converted to Float64 to be displayed.
    hparams_config = Dict{String, Any}(
        "sigma" => sigma,
        "epochs" => epochs,
        "bias" => bias,
        "use_seed" => use_seed,
        "method" => "MC"
    )
    # Specify the list of metric tags that you want to show up in the
    # hyperparameter comparison
    metrics = ["scalar/loss"]

    # Write the hyperparameters and metrics config to the logger.
    write_hparams!(logger, hparams_config, metrics)
end
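
Once the script has run, the logged runs and the hyperparameter comparison can be viewed in TensorBoard. A minimal way to launch it from Julia, assuming the tensorboard CLI from the Python package is available on the PATH:

# Launch TensorBoard on the log directory created above (assumes the
# `tensorboard` executable is installed and on the PATH)
run(`tensorboard --logdir random_walks`)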

0 commit comments
