import math
import numpy as np

test = "Hello World"
print("test: " + test)
def basic_sigmoid(x):
    """
    Compute the sigmoid of a scalar x using math.exp.
    """
    # math.exp only accepts scalars (np.math was removed from recent NumPy
    # releases, so the standard-library math module is used here).
    s = 1 / (1 + math.exp(-x))
    return s

print("basic_sigmoid(3):", basic_sigmoid(3))
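
# basic_sigmoid relies on math.exp, so it fails on numpy arrays. A quick sketch
# of that limitation (the TypeError is caught so the script keeps running):
try:
    basic_sigmoid(np.array([1, 2, 3]))
except TypeError as err:
    print("basic_sigmoid on an array fails:", err)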

x = np.array([1, 2, 3])
print("np.exp(x):", np.exp(x))

x = np.array([1, 2, 3])
print("x + 3:", x + 3)

def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size

    Return:
    s -- sigmoid(x)
    """
    s = 1 / (1 + np.exp(-x))
    return s

x = np.array([1, 2, 3])
print("sigmoid(x):", sigmoid(x))

def sigmoid_derivative(x):
    """
    Compute the gradient (also called the slope or derivative) of the sigmoid function with respect to its input x.
    You can store the output of the sigmoid function into variables and then use it to calculate the gradient.

    Arguments:
    x -- A scalar or numpy array

    Return:
    ds -- Your computed gradient.
    """
    s = sigmoid(x)
    ds = s * (1 - s)
    return ds

x = np.array([1, 2, 5])
print("sigmoid_derivative(x) = " + str(sigmoid_derivative(x)))

def image2vector(image):
    """
    Argument:
    image -- a numpy array of shape (length, height, depth)

    Returns:
    v -- a vector of shape (length*height*depth, 1)
    """
    v = image.reshape(image.shape[0] * image.shape[1] * image.shape[2], 1)
    return v

# A 3 x 3 x 2 array.
image = np.array([[[0.67826139, 0.29380381],
                   [0.90714982, 0.52835647],
                   [0.4215251,  0.45017551]],

                  [[0.92814219, 0.96677647],
                   [0.85304703, 0.52351845],
                   [0.19981397, 0.27417313]],

                  [[0.60659855, 0.00533165],
                   [0.10820313, 0.49978937],
                   [0.34144279, 0.94630077]]])

print("image2vector(image) = " + str(image2vector(image)))

def normalizeRows(x):
    """
    Implement a function that normalizes each row of the matrix x (to have unit length).

    Argument:
    x -- A numpy matrix of shape (n, m)

    Returns:
    x -- The normalized (by row) numpy matrix. You are allowed to modify x.
    """
    # keepdims=True keeps x_norm as shape (n, 1) so the division broadcasts row-wise.
    x_norm = np.linalg.norm(x, axis=1, keepdims=True)
    x = x / x_norm
    return x

x = np.array([[0, 3, 4],
              [1, 6, 4]])
print("normalizeRows(x) = " + str(normalizeRows(x)))

def softmax(x):
    """Calculates the softmax for each row of the input x.

    Your code should work for a row vector and also for matrices of shape (n, m).

    Argument:
    x -- A numpy matrix of shape (n, m)

    Returns:
    s -- A numpy matrix equal to the softmax of x, of shape (n, m)
    """
    x_exp = np.exp(x)
    # Sum over each row; keepdims=True gives shape (n, 1) so the division broadcasts.
    x_sum = np.sum(x_exp, axis=1, keepdims=True)
    s = x_exp / x_sum
    return s

x = np.array([[9, 2, 5, 0, 0],
              [7, 5, 0, 0, 0]])
print("softmax(x) = " + str(softmax(x)))