At which step is output computed?
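The code below calls rand, sigmoid, and dsigmoid but never defines them. A minimal sketch of what they would typically look like in this kind of BP-network example, assuming a logistic sigmoid (the original post does not show these definitions):

import math
import random

def rand(a, b):
    # uniform random number in [a, b)
    return (b - a) * random.random() + a

def sigmoid(x):
    # logistic activation function
    return 1.0 / (1.0 + math.exp(-x))

def dsigmoid(y):
    # derivative of the sigmoid, written in terms of the sigmoid's output y
    return y * (1.0 - y)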
class Unit:
    def __init__(self, length):
        # weights and threshold are initialized to random values in (-0.2, 0.2)
        self.weight = [rand(-0.2, 0.2) for i in range(length)]
        self.change = [0.0] * length
        self.threshold = rand(-0.2, 0.2)
        # self.change_threshold = 0.0

    def calc(self, sample):
        self.sample = sample[:]
        # weighted sum of the inputs minus the threshold, squashed by the sigmoid
        partsum = sum([i * j for i, j in zip(self.sample, self.weight)]) - self.threshold
        self.output = sigmoid(partsum)
        return self.output

    def update(self, diff, rate=0.5, factor=0.1):
        # gradient step plus a momentum term scaled by factor
        change = [rate * x * diff + factor * c for x, c in zip(self.sample, self.change)]
        self.weight = [w + c for w, c in zip(self.weight, change)]
        self.change = [x * diff for x in self.sample]
        # self.threshold = rateN * factor + rateM * self.change_threshold + self.threshold
        # self.change_threshold = factor

    def get_weight(self):
        return self.weight[:]

    def set_weight(self, weight):
        self.weight = weight[:]
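To trace where output first appears, Unit.calc is the step that stores it. A quick check (assuming the hypothetical helper definitions above) could look like this:

u = Unit(2)              # a unit with two inputs
y = u.calc([0.5, -0.3])  # computes sigmoid(w·x - threshold) and stores it in u.output
print(y == u.output)     # True: calc both sets and returns self.output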
class Layer:
    def __init__(self, input_length, output_length):
        self.units = [Unit(input_length) for i in range(output_length)]
        self.output = [0.0] * output_length
        self.ilen = input_length

    def calc(self, sample):
        self.output = [unit.calc(sample) for unit in self.units]
        return self.output[:]

    def update(self, diffs, rate=0.0005, factor=0.00001):
        for diff, unit in zip(diffs, self.units):
            unit.update(diff, rate, factor)

    def get_error(self, deltas):
        # back-propagate the deltas through this layer's weights to get per-input errors
        def _error(deltas, j):
            return sum([delta * unit.weight[j] for delta, unit in zip(deltas, self.units)])
        return [_error(deltas, j) for j in range(self.ilen)]

    def get_weights(self):
        weights = {}
        for key, unit in enumerate(self.units):
            weights[key] = unit.get_weight()
        return weights

    def set_weights(self, weights):
        for key, unit in enumerate(self.units):
            unit.set_weight(weights[key])
class BPNNet:
    def __init__(self, ni, nh, no):
        # number of input, hidden, and output nodes
        self.ni = ni + 1  # +1 for bias node
        self.nh = nh
        self.no = no
        self.hlayer = Layer(self.ni, self.nh)
        self.olayer = Layer(self.nh, self.no)

    def calc(self, inputs):
        if len(inputs) != self.ni - 1:
            raise ValueError('wrong number of inputs')
        # input activations
        self.ai = inputs[:] + [1.0]
        # hidden activations
        self.ah = self.hlayer.calc(self.ai)
        # output activations
        self.ao = self.olayer.calc(self.ah)
        return self.ao[:]
    def update(self, targets, rate, factor):
        if len(targets) != self.no:
            raise ValueError('wrong number of target values')
        # output-layer deltas: sigmoid derivative times the output error
        output_deltas = [dsigmoid(ao) * (target - ao) for target, ao in zip(targets, self.ao)]
        # hidden-layer deltas: propagate the output deltas back through the output layer's weights
        hidden_deltas = [dsigmoid(ah) * err
                         for ah, err in zip(self.ah, self.olayer.get_error(output_deltas))]
        # update the output layer, then the hidden layer
        self.olayer.update(output_deltas, rate, factor)
        self.hlayer.update(hidden_deltas, rate, factor)
        # return the squared error for this sample
        return sum([0.5 * (t - o) ** 2 for t, o in zip(targets, self.ao)])
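For context on how calc and update work together, here is a minimal training sketch; the XOR patterns, the epoch count, and the rate/factor values are illustrative assumptions, not from the post:

# hypothetical XOR training loop, just to show the calc/update cycle
patterns = [
    ([0.0, 0.0], [0.0]),
    ([0.0, 1.0], [1.0]),
    ([1.0, 0.0], [1.0]),
    ([1.0, 1.0], [0.0]),
]
net = BPNNet(2, 3, 1)  # 2 inputs, 3 hidden units, 1 output
for epoch in range(1000):
    error = 0.0
    for inputs, targets in patterns:
        net.calc(inputs)                        # forward pass fills net.ah and net.ao
        error += net.update(targets, 0.5, 0.1)  # backward pass returns this sample's error
for inputs, _ in patterns:
    print(inputs, net.calc(inputs))             # calc returns a copy of net.ao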