chore(lib): change learning_rate
jeertmans committed Jul 25, 2023
1 parent 4472c90 commit cf48d57
Showing 1 changed file with 1 addition and 2 deletions.
differt2d/optimize.py (1 addition & 2 deletions)
```diff
@@ -39,7 +39,7 @@ def minimize(
     fun: Callable[[X], Y],
     x0: Array,
     steps: int = 100,
-    optimizer: optax.GradientTransformation = optax.adam(learning_rate=0.01),
+    optimizer: optax.GradientTransformation = optax.adam(learning_rate=0.1),
 ) -> Tuple[X, Y]:
     """
     Minimizes a scalar function of one or more variables.
@@ -67,7 +67,6 @@ def minimize(
     f_and_df = jax.value_and_grad(fun)
     opt_state = optimizer.init(x0)
 
-    @jax.jit
     def f(carry, x):
         x, opt_state = carry
         loss, grads = f_and_df(x)
```
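For context, here is a minimal sketch of how the full `minimize` function plausibly fits together after this commit. The diff truncates the body after `loss, grads = f_and_df(x)`, so everything beyond that point, including the use of `jax.lax.scan` to drive the loop and the standard optax update step, is an assumption inferred from the `f(carry, x)` signature, not something this commit shows. The original generic `X`/`Y` type variables are simplified to plain `Array` for brevity.

```python
# Hedged sketch of differt2d.optimize.minimize after this commit.
# Everything past `loss, grads = f_and_df(x)` is an assumed, typical
# optax training loop; the real implementation may differ.
from typing import Callable, Tuple

import jax
import jax.numpy as jnp
import optax
from jax import Array


def minimize(
    fun: Callable[[Array], Array],
    x0: Array,
    steps: int = 100,
    optimizer: optax.GradientTransformation = optax.adam(learning_rate=0.1),
) -> Tuple[Array, Array]:
    """Minimizes a scalar function of one or more variables."""
    f_and_df = jax.value_and_grad(fun)  # returns (loss, gradient) in one pass
    opt_state = optimizer.init(x0)

    def f(carry, _):
        x, opt_state = carry
        loss, grads = f_and_df(x)
        # Assumed continuation: standard optax parameter update.
        updates, opt_state = optimizer.update(grads, opt_state)
        x = optax.apply_updates(x, updates)
        return (x, opt_state), loss

    # scan traces and compiles its body function once for the whole loop,
    # which is why the inner @jax.jit removed by this commit was redundant
    # (assuming scan is indeed what drives the iteration here).
    (x, _), losses = jax.lax.scan(f, (x0, opt_state), None, length=steps)
    return x, losses[-1]


# Usage: minimize a quadratic with minimum at x = [1, 1, 1].
x_opt, final_loss = minimize(lambda x: jnp.sum((x - 1.0) ** 2), jnp.zeros(3))
```

The substantive change is the default step size for `optax.adam`, raised from 0.01 to 0.1, which typically lets the default 100-step budget make faster progress on well-scaled problems, at some cost in stability on poorly scaled ones.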
