
I want to build a custom Keras layer that keeps the top-k activation values. I am currently doing this (and it's working fine):

from keras.layers import Lambda
import tensorflow as tf

def max_topk_pool(x, k):
    # tf.nn.top_k returns a (values, indices) named tuple;
    # only the values are needed here
    k_max = tf.nn.top_k(x, k=k, sorted=True, name=None)
    return k_max.values

def KMax(k):
    return Lambda(max_topk_pool,
                  arguments={'k': k},
                  output_shape=lambda input_shape: (None, k))
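
For context, a KMax layer defined this way is dropped into a model like any other layer; a minimal sketch (the Dense size and input shape below are illustrative, not from the original post):

from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(64, input_shape=(100,)))
model.add(KMax(k=5))   # keep the 5 largest activations per sample
model.summary()        # expected output shape: (None, 5)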

Is there a way to build a custom Layer class "KMax" in the way shown by the Keras guide on writing your own layers (https://keras.io/layers/writing-your-own-keras-layers/)?

from keras import backend as K
from keras.layers import Layer

class MyLayer(Layer):

    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        super(MyLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(input_shape[1], self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
        super(MyLayer, self).build(input_shape)  # Be sure to call this at the end

    def call(self, x):
        return K.dot(x, self.kernel)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
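
For reference, a layer subclassed this way is used exactly like a built-in layer; a minimal sketch (the shapes are illustrative):

from keras.models import Sequential

model = Sequential()
model.add(MyLayer(output_dim=32, input_shape=(16,)))
model.summary()  # expected output shape: (None, 32)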

I would like something like this :

from keras import backend as K
from keras.layers import Layer

class KMax(Layer):

    def __init__(self, k, **kwargs):
        self.k = k
        super(KMax, self).__init__(**kwargs)

    def build(self, input_shape):
        <... Lambda here ?>

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.k)

Thank you very much!

1 Answer


Here is what you need (based on https://github.com/keras-team/keras/issues/373):

from keras.engine import Layer, InputSpec
from keras.layers import Flatten
import tensorflow as tf


# https://github.com/keras-team/keras/issues/373
class KMaxPooling(Layer):
    """
    K-max pooling layer that extracts the k-highest activations from a sequence (2nd dimension).
    TensorFlow backend.
    """

    def __init__(self, k=1, **kwargs):
        super().__init__(**kwargs)
        self.input_spec = InputSpec(ndim=3)
        self.k = k

    def compute_output_shape(self, input_shape):
        return input_shape[0], (input_shape[2] * self.k)

    def call(self, inputs):
        # swap last two dimensions since top_k will be applied along the last dimension
        shifted_input = tf.transpose(inputs, [0, 2, 1])

        # extract top_k, returns two tensors [values, indices]
        top_k = tf.nn.top_k(shifted_input, k=self.k, sorted=True, name=None)[0]

        # return flattened output
        return Flatten()(top_k)

    def get_config(self):
        config = {'k': self.k}
        base_config = super().get_config()
        return {**base_config, **config}
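
As a quick smoke test, something along these lines should confirm the output shape (the embedding and convolution sizes here are illustrative, not part of the original answer):

from keras.models import Sequential
from keras.layers import Embedding, Conv1D

model = Sequential()
model.add(Embedding(input_dim=1000, output_dim=50, input_length=20))  # -> (None, 20, 50)
model.add(Conv1D(32, kernel_size=3, activation='relu'))               # -> (None, 18, 32)
model.add(KMaxPooling(k=4))                                           # -> (None, 32 * 4) = (None, 128)
model.summary()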

