I have this custom object:
from random import uniform

class Neuron(object):
    def __init__(self):
        self.bias = 1.0
        self.weightBias = uniform(-1, 1)
        # self.weightBias = 1000
        self.area = 0.0
        self.weightArea = uniform(-1, 1)
        # self.weightArea = 1000
        self.perimeter = 0.0
        self.weightPerimeter = uniform(-1, 1)
        # self.weightPerimeter = 1000
        self.compactness = 0.0
        self.weightCompactness = uniform(-1, 1)
        # self.weightCompactness = 1000
        self.length = 0.0
        self.weightLength = uniform(-1, 1)
        # self.weightLength = 1000
        self.width = 0.0
        self.weightWidth = uniform(-1, 1)
        # self.weightWidth = 1000
        self.asymmetryCoefficient = 0.0
        self.weightAsymmetryCoefficient = uniform(-1, 1)
        # self.weightAsymmetryCoefficient = 1000
        self.lengthGroove = 0.0
        self.weightLengthGroove = uniform(-1, 1)
        # self.weightLengthGroove = 1000
        self.activationValue = 0.0
        self.output = 0
I have a conditional in which an instance of my Neuron class gets appended to a list named errorNeuron1:
# Calculates the output, 0 or 1, for the neuron
def calculateOutput(neuron):
    activationValue = neuron.bias * neuron.weightBias
    activationValue += neuron.area * neuron.weightArea
    activationValue += neuron.perimeter * neuron.weightPerimeter
    activationValue += neuron.compactness * neuron.weightCompactness
    activationValue += neuron.length * neuron.weightLength
    activationValue += neuron.width * neuron.weightWidth
    activationValue += neuron.asymmetryCoefficient * neuron.weightAsymmetryCoefficient
    activationValue += neuron.lengthGroove * neuron.weightLengthGroove
    neuron.activationValue = activationValue
    neuron.output = 1 if activationValue >= 0 else 0
    return neuron

# Retrieves data from row
def parseRow(neuron, row):
    neuron.area = float(row[1][0])
    neuron.perimeter = float(row[1][1])
    neuron.compactness = float(row[1][2])
    neuron.length = float(row[1][3])
    neuron.width = float(row[1][4])
    neuron.asymmetryCoefficient = float(row[1][5])
    neuron.lengthGroove = float(row[1][6])
    return neuron
def main():
    df = importCSV('trainSeeds.csv')
    neuron1 = Neuron()
    neuron2 = Neuron()
    errorNeuron1 = []
    errorNeuron2 = []
    for i in range(0, 1):
        # Iterate over dataframe rows
        for row in df.iterrows():
            neuron1 = parseRow(neuron1, row)
            # print(neuron1.area)
            neuron1 = calculateOutput(neuron1)
            expectedOutput = row[1][7]
            neuron2 = parseRow(neuron2, row)
            neuron2 = calculateOutput(neuron2)
            expectedOutputBinary = format(int(expectedOutput), '02b')
            # If the expectedOutput is not equal to the output of the first node
            if int(expectedOutputBinary[0]) != neuron1.output:
                print(neuron1.activationValue)
                errorNeuron1.append((neuron1.activationValue, row))
                # neuron1 = calculateNewWeights(neuron1.output, neuron1, int(expectedOutputBinary[0]))
            elif int(expectedOutputBinary[1]) != neuron2.output:
                errorNeuron2.append(neuron2)
                # neuron2 = calculateNewWeights(neuron1.output, neuron2, int(expectedOutputBinary[1]))
    # errorNeuron1.sort(key=lambda x: x.activationValue)
    print("----------------------------------------")
    for n in errorNeuron1:
        print(n)
How do I add a custom object to a list and then print each value without getting garbage duplicate values? And what exactly is happening here?
This is my output when I print just before appending to errorNeuron1:
1.37930801491 1.3248274347 1.38542318785 1.39868417375 1.32901788598 1.38486475771 1.42985927348 1.41234073472 1.39305049211 1.36992907231 1.44747542914 1.3691793289 1.44158908789 1.4216161728 1.40943076719 1.41935402214 1.47371279276 1.33364482064 1.32809690259 1.46776554321 1.42706344947 1.41345102369 1.30522785708 1.38857331887 1.35931718327 1.31726730896 1.43862228814 1.41687773781 1.4188485406 1.45373663027 1.36475688453 1.41244050385 1.47747389879 1.39913433929 1.39572965064 1.37398585466 1.39037534707 1.38703336165 1.43049919876 1.53328689772 1.42474806554 1.41506511897 1.40265486573 1.49820537833 1.42285883588 1.36153921531 1.34902938453 1.40761425156 1.39036430332 1.38797706537 1.45461203476 1.47017316218 1.45939109969 1.43000636609 1.38050268146
And this is the output when I iterate over each item in errorNeuron1:
1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648 1.50138570648
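For what it's worth, here is a minimal, self-contained sketch of the behaviour I suspect I might be running into (the Toy class and the variable names below are made up for illustration, they are not part of my real code): appending the same mutable object on every iteration only stores references to that one object, so iterating over the list afterwards shows its final state over and over, while appending an immutable snapshot (or a copy) keeps each intermediate value.

from copy import copy

class Toy(object):
    def __init__(self):
        self.value = 0.0

shared = Toy()
sameObject = []
snapshots = []
copies = []

for v in [1.0, 2.0, 3.0]:
    shared.value = v
    sameObject.append(shared)        # reference to the ONE object, mutated each loop
    snapshots.append(shared.value)   # immutable float captured right now
    copies.append(copy(shared))      # independent copy of the current state

print([t.value for t in sameObject])  # [3.0, 3.0, 3.0] -- final state repeated
print(snapshots)                      # [1.0, 2.0, 3.0]
print([t.value for t in copies])      # [1.0, 2.0, 3.0]

I'm not sure whether reusing the same neuron1/neuron2 objects across rows is what produces the repeated values in my case, which is why I'm asking what exactly is happening.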