tensorflow custom learning rate decay

Posted on 2016-12-26 | In Technical Report

Basic principle: compute the decayed learning rate as a tensor `lr` and pass that tensor to the optimizer.

```python
# import related modules
import tensorflow as tf

from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops

# implementation of Caffe's "inv" decay policy:
#   base_lr * (1 + gamma * iter)^(-power)
def inv_decay(learning_rate, global_step, gamma, power, name=None):
    if global_step is None:
        raise ValueError("global_step is required for inv_decay.")
    with ops.name_scope(name, "InvDecay",
                        [learning_rate, global_step, gamma, power]) as name:
        learning_rate = ops.convert_to_tensor(learning_rate, name="learning_rate")
        dtype = learning_rate.dtype
        global_step = math_ops.cast(global_step, dtype)
        gamma = math_ops.cast(gamma, dtype)
        power = math_ops.cast(power, dtype)
        base = math_ops.multiply(gamma, global_step)
        return math_ops.multiply(learning_rate,
                                 math_ops.pow(1 + base, -power),
                                 name=name)

# get_global_step_var() is assumed to return the global-step variable
# (tensorpack provides a helper with this name); subtracting 2800000
# shifts the schedule so decay effectively starts at that step
lr = inv_decay(learning_rate=0.0005,
               global_step=get_global_step_var() - 2800000,
               gamma=0.0001,
               power=0.75)
tf.summary.scalar('lr', lr)
optimizer = tf.train.GradientDescentOptimizer(lr)
```
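As a quick sanity check, one can evaluate the op at a few iteration counts and compare it against the closed-form schedule. The sketch below assumes a TF 1.x runtime and the `inv_decay` function defined above; the `step` placeholder is introduced here purely to feed different iteration values into one graph.

```python
import numpy as np
import tensorflow as tf

# feed the iteration count through a placeholder so one graph
# can be evaluated at several steps
step = tf.placeholder(tf.float32, shape=[])
lr_t = inv_decay(learning_rate=0.0005, global_step=step,
                 gamma=0.0001, power=0.75)

with tf.Session() as sess:
    for it in [0, 1000, 100000, 1000000]:
        got = sess.run(lr_t, feed_dict={step: it})
        # closed-form Caffe "inv" schedule: base_lr * (1 + gamma * iter)^(-power)
        want = 0.0005 * (1.0 + 0.0001 * it) ** (-0.75)
        assert np.isclose(got, want, rtol=1e-5)
        print("step %7d -> lr %.6g" % (it, got))
```

For the special case power = 1, later TF 1.x releases also ship `tf.train.inverse_time_decay`, which computes `learning_rate / (1 + decay_rate * global_step / decay_steps)`; the custom op above is only needed for the general exponent.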