
Commit 36211f2

Merge pull request #314 from FluxML/adam
Replace ADAM as Adam
2 parents 758112d + 9b3d27c commit 36211f2

9 files changed, +18 −18 lines changed

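Every hunk below makes the same two-line substitution inside an example's train function: the optimizer comment and constructor change from the old ADAM spelling to Adam, the name Flux now uses for this optimiser. A minimal sketch of the resulting pattern follows; the learning rate, model, and input here are illustrative placeholders, not values from the repository.

using Flux

η = 1f-3              # stands in for args.η, the learning rate each example script defines
model = Dense(4, 2)   # placeholder for the example-specific model

# Adam optimizer (previously constructed as ADAM(η))
opt = Adam(η)

# parameters
ps = Flux.params(model)

# one implicit-parameter update, roughly how loops built on Flux.params apply the optimizer
x = rand(Float32, 4)
gs = gradient(() -> sum(model(x)), ps)
Flux.Optimise.update!(opt, ps, gs)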

examples/digitsum_deepsets.jl

Lines changed: 2 additions & 2 deletions
@@ -117,8 +117,8 @@ function train(; kws...)
 ρ = Dense(args.hidden_dims[3], args.target_dim)
 model = DeepSet(ϕ, ρ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/gae.jl

Lines changed: 2 additions & 2 deletions
@@ -74,8 +74,8 @@ function train(; kws...)

 model = GAE(encoder, σ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/gat.jl

Lines changed: 2 additions & 2 deletions
@@ -83,8 +83,8 @@ function train(; kws...)
 WithGraph(fg, GATConv(args.hidden_dim*args.heads=>args.target_dim, heads=args.heads, concat=false)),
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/gcn_with_fixed_graph.jl

Lines changed: 2 additions & 2 deletions
@@ -78,8 +78,8 @@ function train(; kws...)
 WithGraph(fg, GCNConv(args.hidden_dim=>args.target_dim)),
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/gde.jl

Lines changed: 2 additions & 2 deletions
@@ -85,8 +85,8 @@ function train(; kws...)
 WithGraph(fg, GCNConv(args.hidden_dim=>args.target_dim)),
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model, node.p)

examples/graphconv.jl

Lines changed: 2 additions & 2 deletions
@@ -73,8 +73,8 @@ function train(; kws...)
 WithGraph(fg, GraphConv(args.hidden_dim=>args.target_dim)),
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/msg-passing.jl

Lines changed: 2 additions & 2 deletions
@@ -75,8 +75,8 @@ function train(; kws...)
 WithGraph(fg, GraphConv(fg, args.hidden_dim=>args.target_dim)),
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/semisupervised_gcn.jl

Lines changed: 2 additions & 2 deletions
@@ -84,8 +84,8 @@ function train(; kws...)
 node_feature,
 ) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)

examples/vgae.jl

Lines changed: 2 additions & 2 deletions
@@ -94,8 +94,8 @@ function train(; kws...)

 model = VGAE(encoder, decoder) |> device

-# ADAM optimizer
-opt = ADAM(args.η)
+# Adam optimizer
+opt = Adam(args.η)

 # parameters
 ps = Flux.params(model)
