# 机器学习心得（三）——softmax回归

xiaoxiao2021-02-28  7

# 机器学习心得（三）——softmax回归

## softmax函数与logistic函数的关系

logistic回归可以视为softmax回归进行二分类时的特例，即当类别数为2时，softmax 回归退化为 logistic 回归，由于数学学得不好，所以就不多推导了。

## 代码实现

# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 17:19:07 2018

@author: DZF_zuishuai

Softmax regression on MNIST (TensorFlow 1.x): a single linear layer
``y = softmax(xW + b)`` trained by SGD on a cross-entropy loss, printing
test-set accuracy every 50 mini-batches.
"""
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data

# Load MNIST (downloaded on first run); labels are one-hot vectors of length 10.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

# Model: each input is a flattened 28x28 image (784 pixels), 10 output classes.
x = tf.placeholder("float", [None, 784])
w = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, w) + b)

# Cross-entropy loss against the one-hot target distribution.
y_ = tf.placeholder("float", [None, 10])
# Clip y away from 0 before the log: softmax can underflow to exactly 0,
# and log(0) = -inf would turn the loss (and gradients) into NaN.
cross_entropy = -tf.reduce_sum(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))

# Stochastic gradient descent with learning rate 0.01.
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)

# Build the evaluation ops ONCE, outside the training loop. The original
# created fresh tf.equal/tf.cast/tf.reduce_mean nodes on every report step,
# growing the graph without bound.
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

# initialize_all_variables() is deprecated in TF1;
# global_variables_initializer() is the drop-in replacement.
init = tf.global_variables_initializer()

sess = tf.Session()
sess.run(init)

# Train for 1000 mini-batches of 100 images each.
for i in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
    if i % 50 == 0:
        # Report accuracy on the full test set.
        print("Step: ", i, "Accuracy: ",
              sess.run(accuracy,
                       feed_dict={x: mnist.test.images,
                                  y_: mnist.test.labels}))