lybbb 2020-05-05
```python
import tensorflow as tf

# Create a metric
m = tf.keras.metrics.Accuracy()
# Feed data into the metric
m.update_state([0, 1, 1], [0, 1, 2])
# Read the accumulated statistic
m.result()  # accuracy ≈ 0.667 (2 of the 3 predictions match)
# Clear the accumulated state
m.reset_states()
```
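The same create / `update_state` / `result()` / `reset_states()` lifecycle applies to the other classes in `tf.keras.metrics`. Below is a minimal sketch with `tf.keras.metrics.Mean`, the metric used later to average the loss; the input values are made up for illustration:

```python
import tensorflow as tf

# Create the averaging metric
loss_meter = tf.keras.metrics.Mean()
# Feed in values, e.g. per-batch losses (illustrative numbers only)
loss_meter.update_state([0.9, 0.7, 0.5])
# Read the running average: (0.9 + 0.7 + 0.5) / 3 = 0.7
print(loss_meter.result().numpy())  # 0.7
# Clear the accumulated state before the next logging interval
loss_meter.reset_states()
```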
```python
import datetime
import tensorflow as tf

acc_meter = tf.keras.metrics.Accuracy()
loss_meter = tf.keras.metrics.Mean()   # running average of the loss
op = tf.keras.optimizers.Adam(0.01)

current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
log_dir = "logs/" + current_time
summary_writer = tf.summary.create_file_writer(log_dir)

for epoch in range(10):
    # training
    for step, (x, y) in enumerate(train_data):
        with tf.GradientTape() as tape:
            loss = tf.losses.categorical_crossentropy(y, model(x))
            loss_meter.update_state(loss)                          # accumulate the per-batch loss
        grads = tape.gradient(loss, model.trainable_variables)     # compute gradients
        op.apply_gradients(zip(grads, model.trainable_variables))  # apply the update w = w - delta
        with summary_writer.as_default():
            # step sets the x-axis position of the point in TensorBoard
            tf.summary.scalar(name="loss", data=loss_meter.result().numpy(), step=step)
        print(epoch, step, loss, loss_meter.result().numpy())      # numpy() converts the tensor into a plain value
        loss_meter.reset_states()

    # evaluation
    for step, (x, y) in enumerate(test_data):
        out = model(x)
        pred = tf.argmax(out, axis=-1)
        pred = tf.cast(pred, dtype=tf.int32)
        y = tf.cast(tf.argmax(y, axis=-1), dtype=tf.int32)
        acc_meter.update_state(y, pred)
    with summary_writer.as_default():
        tf.summary.scalar(name="acc", data=acc_meter.result().numpy(), step=epoch)
    print(epoch, acc_meter.result().numpy())
    acc_meter.reset_states()
```
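Once the loop has run, the scalars written under `logs/<timestamp>` can be viewed by pointing TensorBoard at the log directory, e.g. `tensorboard --logdir logs`, and opening the URL it prints in a browser.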