学习率周期性变化,能够解决陷入鞍点的问题,更多的方式请参考 https://github.com/bckenstler/CLR
base_lr:最低的学习率
max_lr:最高的学习率
step_size:(2-8)倍的每个epoch的训练次数。
scale_fn(x):自定义的缩放函数,用于按周期衰减最高学习率。
cycle = np.floor(1+iterations/(2*step_size))
x = np.abs(iterations/step_size - 2*cycle + 1)
lr= base_lr + (max_lr-base_lr)*np.maximum(0, (1-x))*scale_fn(x)
一,学习率周期性变化
# Cyclical learning rate, "triangular" policy (TF1 graph mode).
# base_lr=1e-6, max_lr=1e-3, step_size=1000 iterations per half-cycle.
global_steps = tf.placeholder(shape=[1], dtype=tf.int64)
steps_f = tf.cast(global_steps, dtype=tf.float32)
# cycle counts which triangle we are in; x in [0, 1] is the position inside it.
cycle = tf.floor(1. + steps_f / (2 * 1000.))
x = tf.abs(steps_f / 1000. - 2. * cycle + 1.)
learning_rate = 1e-6 + (1e-3 - 1e-6) * tf.maximum(0., (1 - x))

with tf.Session() as sess:
    lr_list = []
    cycle_list = []
    for i in range(8000):
        # Fetch both tensors in a single run — the original called sess.run
        # twice per iteration, executing the graph twice for no reason.
        lr, cl = sess.run([learning_rate, cycle],
                          feed_dict={global_steps: [i]})
        lr_list.append(lr)
        cycle_list.append(cl)
    plt.plot(lr_list)
    plt.show()
    print(cycle_list)
def lr_change(base_lr=1e-6, max_lr=1e-3, step_size=1000.,):lr_list=[]for clr_iterations in range(8000):cycle = np.floor(1 + clr_iterations / (2 * step_size))x = np.abs(clr_iterations / step_size - 2 * cycle + 1)lr=base_lr + (max_lr - base_lr) * np.maximum(0, (1 - x))#/(2 ** (cycle - 1))lr_list.append(lr)plt.plot(lr_list,'r')plt.xlabel('iterations')plt.ylabel('learning rate')plt.savefig('./data/learning_rate.jpg')plt.show()
二,学习率周期性衰减
# Cyclical learning rate, "triangular2" policy (TF1 graph mode):
# same triangle wave, but the amplitude is halved every cycle.
global_steps = tf.placeholder(shape=[1], dtype=tf.int64)
steps_f = tf.cast(global_steps, dtype=tf.float32)
cycle = tf.floor(1. + steps_f / (2 * 1000.))
x = tf.abs(steps_f / 1000. - 2. * cycle + 1.)
# Divide by 2**(cycle-1) so each successive cycle peaks at half the height.
learning_rate = (1e-6 + (1e-3 - 1e-6) * tf.maximum(0., (1 - x))
                 / tf.pow(2., cycle - 1.))

with tf.Session() as sess:
    lr_list = []
    cycle_list = []
    for i in range(8000):
        # One sess.run for both fetches — the original executed the graph
        # twice per iteration (once for lr, once for cycle).
        lr, cl = sess.run([learning_rate, cycle],
                          feed_dict={global_steps: [i]})
        lr_list.append(lr)
        cycle_list.append(cl)
    plt.plot(lr_list)
    plt.show()
    print(cycle_list)
def lr_change(base_lr=1e-6, max_lr=1e-3, step_size=1000.,):lr_list=[]for clr_iterations in range(8000):cycle = np.floor(1 + clr_iterations / (2 * step_size))x = np.abs(clr_iterations / step_size - 2 * cycle + 1)lr=base_lr + (max_lr - base_lr) * np.maximum(0, (1 - x))/(2 ** (cycle - 1))lr_list.append(lr)plt.plot(lr_list,'r')plt.xlabel('iterations')plt.ylabel('learning rate')plt.savefig('./data/learning_rate.jpg')plt.show()