一.作业要求

  • 用线性二分类实现或门、与门
项目 内容
这个作业属于哪个课程 人工智能实战
我在这个课程的目标是 将人工智能技术与本专业知识联系
这个作业在哪个具体方面帮助我实现目标 了解线性二分类的实现过程
作业正文 链接

二.核心代码

def Sigmoid(x):
    """Logistic sigmoid 1 / (1 + e^-x), applied element-wise to `x`."""
    return 1.0 / (1.0 + np.exp(-x))

# Forward pass for a whole batch of column-vector samples.
def ForwardCalculationBatch(W, B, batch_X):
    """Return sigmoid(W @ batch_X + B): the predicted probabilities A."""
    linear_out = np.dot(W, batch_X) + B
    return Sigmoid(linear_out)

# Compute the loss over the full dataset.
def CheckLoss(W, B, X, Y):
    """Mean binary cross-entropy loss of predictions A against labels Y.

    X has one sample per column; Y is the matching (1, m) label row.
    """
    m = X.shape[1]
    A = ForwardCalculationBatch(W, B, X)

    # Per-sample cross-entropy: y*log(a) + (1-y)*log(1-a)
    cross_entropy = np.multiply(Y, np.log(A)) + np.multiply(1 - Y, np.log(1 - A))

    # Negate, sum over samples, and average (binary classification loss).
    return np.sum(-cross_entropy) / m

def Inference(W, B, X_norm, xt):
    """Normalize the raw query samples `xt` and run the forward pass.

    Returns the predicted probabilities and the normalized query points.
    """
    normalized = NormalizePredicateData(xt, X_norm)
    prob = ForwardCalculationBatch(W, B, normalized)
    return prob, normalized

def ShowData(X, Y):
    """Scatter-plot the samples: red dots for label 0, green crosses for 1."""
    for col in range(X.shape[1]):
        label = Y[0, col]
        if label == 0:
            plt.plot(X[0, col], X[1, col], '.', c='r')
        elif label == 1:
            plt.plot(X[0, col], X[1, col], 'x', c='g')
    plt.show()

def ShowResult(X, Y, W, B, xt):
    """Plot training samples, the learned decision boundary, and query points."""
    for col in range(X.shape[1]):
        label = Y[0, col]
        if label == 0:
            plt.plot(X[0, col], X[1, col], '.', c='r')
        elif label == 1:
            plt.plot(X[0, col], X[1, col], 'x', c='g')

    # Decision boundary w0*x + w1*y + b = 0, solved for y:
    # y = -(w0/w1)*x - b/w1
    intercept = -B[0, 0] / W[0, 1]
    slope = -W[0, 0] / W[0, 1]
    xs = np.linspace(0, 1, 10)
    plt.plot(xs, slope * xs + intercept)

    # Mark the (normalized) query points as blue triangles.
    for col in range(xt.shape[1]):
        plt.plot(xt[0, col], xt[1, col], '^', c='b')

    plt.axis([-0.1, 1.1, -0.1, 1.1])
    plt.show()

if __name__ == '__main__':
    # Training method: SGD, MiniBatch, or FullBatch
    method = "SGD"
    logic = "OR"
    # The two-input truth table is shared; only the labels differ per gate.
    gate_labels = {
        "OR": [0, 1, 1, 1],
        "AND": [0, 0, 0, 1],
    }
    XData = np.array([0, 0, 1, 1, 0, 1, 0, 1]).reshape(2, 4)
    YData = np.array(gate_labels[logic]).reshape(1, 4)
    X, X_norm = NormalizeData(XData)
    ShowData(XData, YData)
    Y = YData
    W, B = train(method, X, Y, ForwardCalculationBatch, CheckLoss)
    print("W=", W)
    print("B=", B)
    # Query points, column-major so each column is one (x1, x2) sample.
    xt = np.array([0.3, 0.7, 0.2, 0.4, 0.5, 0.7]).reshape(2, 3, order='F')
    result, xt_norm = Inference(W, B, X_norm, xt)
    print("result=", result)
    print(np.around(result))
    ShowResult(X, YData, W, B, xt_norm)
    

其他代码调用Level0_BaseClassification.py

三.结果显示

与门结果
人工智能课程2019-第五次作业-徐浩原 随笔 第1张

SRE实战 互联网时代守护先锋,助力企业售后服务体系运筹帷幄!一键直达领取阿里云限量特价优惠。

人工智能课程2019-第五次作业-徐浩原 随笔 第2张

人工智能课程2019-第五次作业-徐浩原 随笔 第3张
考察参数点 xt = np.array([0.3,0.7,0.2,0.4,0.5,0.7]).reshape(2,3,order='F')
结果:
epoch=99, iteration=3, loss=0.255683
W= [[1.81802157 1.79301854]]
B= [[-2.93684159]]
result= [[0.24299626 0.13516647 0.31589148]]
[[0. 0. 0.]]

或门结果
人工智能课程2019-第五次作业-徐浩原 随笔 第4张

人工智能课程2019-第五次作业-徐浩原 随笔 第5张

人工智能课程2019-第五次作业-徐浩原 随笔 第6张
考察参数点 xt = np.array([0.3,0.7,0.2,0.4,0.5,0.7]).reshape(2,3,order='F')
结果:
epoch=99, iteration=3, loss=0.181774
W= [[2.50612426 2.51980188]]
B= [[-0.57481532]]
result= [[0.87444915 0.71795298 0.91998241]]
[[1. 1. 1.]]

扫码关注我们
微信号:SRE实战
拒绝背锅 运筹帷幄