Custom layer in Keras with multiple inputs and multiple outputs

data-mining  deep-learning  tensorflow  keras
2022-02-09 03:06:32

I have implemented a custom layer in Keras that takes multiple inputs and produces multiple outputs. My code is as follows:

import tensorflow as tf
from keras import backend as K
from keras import initializers
from keras.engine.topology import Layer

class Attention(Layer):
    def __init__(self, max_input_left=MAX_SEQUENCE_LENGTH, max_input_right=MAX_SEQUENCE_LENGTH,
                 filter_sizes=[1, 2, 3], num_filters=400, **kwargs):
        self.init = initializers.get('normal')
        self.max_input_left = max_input_left
        self.max_input_right = max_input_right
        self.filter_sizes = filter_sizes
        self.num_filters = num_filters
        if self.num_filters != 0:
            self.num_filters_total = self.num_filters * len(self.filter_sizes)
        super(Attention, self).__init__(**kwargs)

    def build(self, input_shape):
        # attention matrix U, sized from the second input's feature dimension
        input_dim = input_shape[1][-1] * len(self.filter_sizes)
        self.U = self.init((input_dim, input_dim))
        super(Attention, self).build(input_shape)

    def compute_mask(self, inputs, mask=None):
        mask = super(Attention, self).compute_mask(inputs, mask)
        return mask

    def call(self, inputs, mask=None):
        input_left = inputs[0]
        input_right = inputs[1]
        # reshape both inputs to (batch, time, filters)
        Q = tf.reshape(input_left, [-1, self.max_input_left, len(self.filter_sizes) * self.num_filters], name='Q')
        A = tf.reshape(input_right, [-1, self.max_input_right, len(self.filter_sizes) * self.num_filters], name='A')
        # G = tf.tanh(tf.matmul(tf.matmul(Q, self.U), A, transpose_b=True), name='G')
        print Q
        print tf.reshape(Q, [-1, len(self.filter_sizes) * self.num_filters])
        first = tf.matmul(tf.reshape(Q, [-1, len(self.filter_sizes) * self.num_filters]), self.U)
        second_step = tf.reshape(first, [-1, self.max_input_left, len(self.filter_sizes) * self.num_filters])
        result = tf.matmul(second_step, tf.transpose(A, perm=[0, 2, 1]))
        # print 'result', result
        G = tf.tanh(result)
        # G = result
        # column-wise pooling, row-wise pooling
        row_pooling = tf.reduce_max(G, 1, True, name='row_pooling')
        col_pooling = tf.reduce_max(G, 2, True, name='col_pooling')

        self.attention_q = tf.nn.softmax(col_pooling, 1, name='attention_q')
        print self.attention_q
        self.attention_a = tf.nn.softmax(row_pooling, name='attention_a')
        # attention-weighted representations of the two inputs
        R_q = tf.reshape(tf.matmul(Q, self.attention_q, transpose_a=1), [-1, self.num_filters * len(self.filter_sizes)], name='R_q')
        R_a = tf.reshape(tf.matmul(self.attention_a, A), [-1, self.num_filters * len(self.filter_sizes)], name='R_a')
        self.output_dim1 = R_q
        self.output_dim2 = R_a
        return R_q, R_a

    def compute_output_shape(self, input_shapes):
        print (tuple(K.int_shape(self.output_dim1)), tuple(K.int_shape(self.output_dim2)))
        return (tuple(K.int_shape(self.output_dim1)), tuple(K.int_shape(self.output_dim2)))

I call this layer like this:

  attn1, attn2 = Attention()([qns_cnn, ans_cnn])

This gives me the following error:

Traceback (most recent call last):
  File "workspace-2/SemEval/test2QA.py", line 253, in <module>
    attn1 = Attention()([qns_cnn, ans_cnn])
  File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 619, in __call__
    arguments=user_kwargs)
  File "/usr/local/lib/python2.7/dist-packages/keras/engine/topology.py", line 681, in _add_inbound_node
    output_tensors[i]._keras_shape = output_shapes[i]
AttributeError: 'tuple' object has no attribute '_keras_shape'

Can someone suggest where the problem is?
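While debugging I noticed that my call returns a tuple (R_q, R_a) and my compute_output_shape returns a tuple of tuples. For comparison, here is a minimal two-input, two-output layer sketch, assuming Keras 2.x with the TensorFlow backend; the TwoInTwoOut name and the toy arithmetic are made up purely for illustration. Structurally it differs from my layer only in returning lists rather than tuples:

from keras.engine.topology import Layer
from keras.layers import Input

class TwoInTwoOut(Layer):
    # toy layer: takes [a, b] and returns [a + b, a - b]
    def call(self, inputs, mask=None):
        a, b = inputs
        return [a + b, a - b]                        # a list of tensors, not a tuple

    def compute_mask(self, inputs, mask=None):
        return [None, None]                          # one mask entry per output

    def compute_output_shape(self, input_shapes):
        return [input_shapes[0], input_shapes[1]]    # a list of shape tuples

x = Input(shape=(10,))
y = Input(shape=(10,))
s, d = TwoInTwoOut()([x, y])

If the tuple return really is the culprit, then changing return R_q, R_a to return [R_q, R_a] and returning a list of shape tuples from compute_output_shape should be the fix, but I would appreciate confirmation (and any comment on whether compute_output_shape should be derived from input_shapes rather than from tensors saved during call).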

0 Answers

No replies yet.