我正在尝试实现这篇论文中的网络(https://arxiv.org/pdf/1604.03505) (源代码文件 - https://gist.github.com/kanihal/8b981b75cb6f22264c0af03be79b243a) 这是使用函数式 API (functional API) 的正确网络实现吗?
我该如何把输入传递给 bi-LSTM?我应该使用合并层 (merge layer) 吗?如果是,具体该怎么做?
如何用顺序模型 (Sequential) 实现同样的结构?需要对网络做分叉 (fork) 吗?
这是我编写的模型的代码片段,
# Keras 1.x functional-API implementation of the N x N grid model from
# arXiv:1604.03505: each grid cell's VGG fc7 feature goes through a shared
# Dense layer, the N*N reduced vectors form a sequence fed to two stacked
# bi-LSTMs (a "top" and a "bottom" stack), and a shared output Dense maps
# each timestep to per-class partial counts.
# NOTE(review): assumes `getfc7`, `cells`, and the Keras names
# (Input, Dense, Bidirectional, LSTM, merge, Model) are imported above.
from keras.layers import Reshape, Lambda

N = 3
fc7 = np.ndarray(shape=(1, 4096), dtype=float)  # dummy VGG fc7: 1 row of 4096 numbers
cells_fc7 = np.array([[fc7 for j in range(N)] for i in range(N)])

# get fc7 for cells - don't run this for now
for i in range(N):
    for j in range(N):
        cells_fc7[i][j] = getfc7(cells[i][j])

# --- model code starts here ---
# One Input per grid cell. Dense(input_dim=4096) expects a flat 4096-vector,
# so shape=(4096,); the original shape=(1, 4096) would feed Dense a 3-D tensor.
cells_in = [Input(shape=(4096,)) for _ in range(N * N)]

# Shared fully-connected layer 4096 -> 500; one set of weights for all cells.
sharedfc = Dense(output_dim=500, input_dim=4096, activation='relu')

# BUG FIX: cells_in is a flat list, so it must be indexed with N*i+j
# (the original cells_in[i][j] tried to subscript a Keras tensor).
cells_rf = [sharedfc(cells_in[N * i + j]) for i in range(N) for j in range(N)]

# BUG FIX: Keras symbolic tensors cannot be wrapped in np.array to form the
# LSTM input; concatenate them with a merge layer and reshape to a
# (timesteps=N*N, features=500) sequence instead.
seq = Reshape((N * N, 500))(merge(cells_rf, mode='concat'))

# Top bi-LSTM stack over the cell sequence.
top1 = Bidirectional(LSTM(500, return_sequences=True), input_shape=(N * N, 500))
t1 = top1(seq)
top2 = Bidirectional(LSTM(1000, return_sequences=True))
t_out = top2(t1)

# Bottom bi-LSTM stack (same sequence, independent weights).
btm1 = Bidirectional(LSTM(500, return_sequences=True), input_shape=(N * N, 500))
b1 = btm1(seq)    # BUG FIX: original called btm1(a) with `a` undefined (NameError)
btm2 = Bidirectional(LSTM(1000, return_sequences=True))
b_out = btm2(b1)  # BUG FIX: original fed t1 (the top stack) instead of b1

# Concatenate the two sequence outputs along the feature axis:
# each stack emits (N*N, 2000), so the merged sequence is (N*N, 4000).
merged_vector = merge([t_out, b_out], mode='concat')

n_classes = 80
# Shared output layer mapping each 4000-d timestep to per-class partial counts.
sharedfc_out = Dense(output_dim=n_classes, input_dim=4000, activation='relu')

# BUG FIX: a Keras tensor cannot be list-indexed (merged_vector[N*i+j]);
# slice each timestep out with a Lambda layer, then apply the shared Dense.
# (k=k binds the loop variable per-lambda to avoid the late-binding pitfall.)
outpc = []
for k in range(N * N):
    step = Lambda(lambda x, k=k: x[:, k, :], output_shape=(4000,))(merged_vector)
    outpc.append(sharedfc_out(step))

model = Model(input=cells_in, output=outpc)