Neural Network from Scratch in Cangjie: Part 4 - 仓颉从头开始的神经网络:第四部分

Today, we will try to recreate the output layer for a typical classification network with the help of the `Softmax` activation function. It will let us normalize the output of the final layer in our network and display probabilities of which of the 3 classes the network thinks the input belongs to. For example, [0.25, 0.4, 0.35]. In this example, the network would be 40% sure that the second class is the correct class. These probabilities add up to 1.

Let's do another forward pass through our network, but this time add an output layer with 3 neurons that represent the 3 predicted classes in our simulated spiral data.

import matrix4cj.*
import std.collection.*
import std.random.*
import csv4cj.*
import std.os.posix.*
import std.fs.*
import std.convert.*

let random = Random(0) // seed = 0: fixed seed so weight initialization is reproducible across runs

// Entry point: runs one forward pass through a 3-layer network
// (2 -> 4 -> 4 -> 3 neurons) over the simulated spiral dataset and
// prints the first five output rows after each stage.
// NOTE: generic type arguments below were stripped by HTML extraction
// in the published article and are restored here.
main() {
    let X: Array<Array<Float64>> // input features: one [x, y] coordinate pair per sample
    let y: Array<Int64>          // class labels — assumed Int64 per earlier parts of the series; TODO confirm getData's return type

    (X, y) = getData()

    // Hidden layer 1: 2 inputs -> 4 neurons, followed by ReLU.
    let dense1 = Layer_Dense(2, 4, X.size)
    let activation1 = Activation_ReLU()

    // Hidden layer 2: 4 inputs -> 4 neurons, followed by ReLU.
    let dense2 = Layer_Dense(4, 4, X.size)
    let activation2 = Activation_ReLU()

    // Output layer: 4 inputs -> 3 neurons (one per predicted class).
    // No activation yet — Softmax is introduced later in the article.
    let dense3 = Layer_Dense(4, 3, X.size)

    dense1.forward(X)
    activation1.forward(dense1.output)
    println(activation1.output[..5])

    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    println(activation2.output[..5])

    dense3.forward(activation2.output)
    println(dense3.output[..5])
}

// ReLU activation: replaces every negative element with 0.0, applied
// row-by-row to a batch of layer outputs.
// NOTE: generic type arguments below were stripped by HTML extraction
// in the published article and are restored here.
class Activation_ReLU {
    // Result of the most recent forward() call; empty until first use.
    var output: Array<Array<Float64>>

    public init() {
        this.output = []
    }

    // Applies element-wise ReLU to each row of `inputs` and stores the
    // clipped batch in `this.output`.
    public func forward(inputs: Array<Array<Float64>>) {
        let output = ArrayList<Array<Float64>>([])

        for (array in inputs) {
            output.append(maximum(array))
        }

        this.output = output.toArray()
    }

    // Element-wise max(0, x) over a single row.
    private func maximum(input: Array<Float64>): Array<Float64> {
        func clip(i: Float64): Float64 {
            if (i > 0.0) {
                return i
            } else {
                return 0.0
            }
        }

        let output = input |> map {i => clip(i)} |> collectArray

        return output
    }
}

// Fully-connected (dense) layer: output = inputs · weightsᵀ + biases.
// NOTE: generic type arguments below were stripped by HTML extraction
// in the published article and are restored here.
class Layer_Dense {
    var weights: Matrix              // shape (nNeurons, nInputs); small random values
    var biases: Matrix               // shape (batchSize, nNeurons); zeros replicated per row
    var output: Array<Array<Float64>> // result of the most recent forward() call

    // nInputs:  number of features each neuron receives
    // nNeurons: number of neurons in this layer
    // batchSize: number of samples per batch — biases are tiled to this many
    //            rows so Matrix.plus can add them without broadcasting.
    public init(nInputs: Int64, nNeurons: Int64, batchSize: Int64) {
        // Scale by 0.01 to start with small weights, as in the NNFS recipe.
        this.weights = Matrix(
            Array<Array<Float64>>(nNeurons, {_ => Array<Float64>(nInputs, {_ => random.nextFloat64() * 0.01})}))

        this.biases = Matrix(Array<Array<Float64>>(batchSize, {_ => Array<Float64>(nNeurons, {_ => 0.0})}))
        this.output = []
    }

    // Forward pass: multiply the input batch by the transposed weight
    // matrix, add biases, and cache the result as a plain nested array.
    public func forward(inputs: Array<Array<Float64>>) {
        this.output = Matrix(inputs).times(this.weights.transpose()).plus(this.biases).getArray()
    }
}


你可能感兴趣的:(神经网络,深度学习,华为,开发语言)