我在 Python 中建立了一个基本的神经网络。其思想是，神经网络可以有任意你想要的结构，而不仅仅是标准的分层结构（即每个神经元都连接到下一层的每一个神经元）。
from numpy import exp
class Feed_forward_network:
    """A feed-forward neural network with an arbitrary connection topology.

    Neurons are identified by their index into ``neuron_data``:
      * indices ``0 .. inputs-1`` are input placeholders and MUST be ``None``;
      * indices ``inputs .. inputs+outputs-1`` are the output neurons;
      * every non-input entry is a tuple ``(activation, bias, connections)``:
          - ``activation``: callable ``float -> float``
          - ``bias``: float. NOTE(review): it *multiplies* the weighted input
            sum before activation; a conventional bias is *added* — confirm
            this is intentional.
          - ``connections``: list of ``(source_index, weight)`` tuples giving
            this neuron's incoming edges. Cycles (including self-connections)
            are tolerated: an unfinished neuron contributes 0 (see
            ``calculate_neuron``).
    """
    def __init__(self, inputs: int, outputs: int, neuron_data):
        # Fixed: the old check (outputs > len(neuron_data)) let invalid
        # configurations through, because the output neurons live at indices
        # inputs..inputs+outputs-1; its error message also stated the
        # inverted comparison ("<" while testing ">").
        if inputs + outputs > len(neuron_data):
            raise RuntimeError("inputs + outputs > len(neuron_data)")
        self.inputs = inputs
        self.outputs = outputs
        self.neuron_data = neuron_data
        # Memoization cache of neuron outputs; None means "not computed yet".
        self.neuron_values = [None] * len(neuron_data)
    def activate(self, inputs):
        """Feed ``inputs`` through the network.

        inputs: sequence of numbers, one value per input neuron.
        Returns: tuple of ``self.outputs`` values (the output neurons).
        Raises: RuntimeError if ``len(inputs) != self.inputs``.
        """
        if self.inputs != len(inputs):
            raise RuntimeError("self.inputs != len(inputs)")
        # Rebuild the cache and seed the input slots in one pass.
        values = [None] * len(self.neuron_data)
        values[:self.inputs] = inputs
        self.neuron_values = values
        calc = self.calculate_neuron  # hoist the attribute lookup
        return tuple(calc(i) for i in range(self.inputs, self.inputs + self.outputs))
    def calculate_neuron(self, neuron):
        """Return the value of ``neuron``, computing and caching it on demand."""
        if neuron < self.inputs:
            return self.neuron_values[neuron]
        cached = self.neuron_values[neuron]
        if cached is not None:  # 'is not None', since 0 is a valid cached value
            return cached
        # Temporary 0 breaks cycles: a neuron that (transitively) feeds itself
        # sees 0 for its own unfinished value instead of recursing forever.
        self.neuron_values[neuron] = 0
        activation, bias, connections = self.neuron_data[neuron]
        calc = self.calculate_neuron  # local binding is cheaper in the hot loop
        total = sum(calc(source) * weight for source, weight in connections)
        value = activation(total * bias)
        self.neuron_values[neuron] = value
        return value
def sigmoid(x: float) -> float:
    """Logistic sigmoid: maps any real x into the open interval (0, 1)."""
    return 1.0 / (1.0 + exp(-x))
if __name__ == "__main__":
ffn = Feed_forward_network(2, 1, [None, None, (sigmoid, 1.0, [(0, 1.0), (1, 1.0), (3, 1.0)]), (sigmoid, 1.0, [(0, 1.0), (1, 1.0), (3, 1.0)])])
print(ffn.activate([1, 1]))
    print(ffn.neuron_values)

我主要是在寻找性能改进，所以下面是性能分析（profiling）的输出（我把 if __name__ == "__main__" 部分放进一个 for 循环重复执行了 100 次，因为只运行一次时所有耗时都显示为 0.000 秒）：
ncalls tottime percall cumtime percall filename:lineno(function)
1 0.000 0.000 0.000 0.000 <string>:1(__new__)
100 0.001 0.000 0.001 0.000 <string>:20(__init__)
100 0.001 0.000 0.009 0.000 <string>:28(activate)
1 0.000 0.000 0.000 0.000 <string>:3(Feed_forward_network)
100 0.000 0.000 0.008 0.000 <string>:37(<listcomp>)
700/100 0.002 0.000 0.008 0.000 <string>:39(calculate_neuron)
200/100 0.001 0.000 0.005 0.000 <string>:50(<listcomp>)
200 0.005 0.000 0.005 0.000 <string>:58(sigmoid)

https://codereview.stackexchange.com/questions/285173
复制相似问题