How would one concatenate layers like in Keras: main_l = concatenate([rnn_layer, price, ...])?

I am trying to port a Kaggle kernel written in Keras to PyTorch:

https://www.kaggle.com/shanth84/fast-text-rnn-keras-0-2276/notebook

However, I am not sure how to replicate the Keras-style concatenation of layers in PyTorch (more context below):

main_l = concatenate([
          rnn_layer1
        , Flatten() (emb_region)
        , Flatten() (emb_city)
        , Flatten() (emb_category_name)
        , Flatten() (emb_parent_category_name)
        , Flatten() (emb_param_1)
        , Flatten() (emb_param123)
        , price
        , item_seq_number
    ])

More context:

def RNN_model():

    #Inputs
    seq_title_description = Input(shape=[X_train["seq_title_description"].shape[1]], name="seq_title_description")
    region = Input(shape=[1], name="region")
    city = Input(shape=[1], name="city")
    category_name = Input(shape=[1], name="category_name")
    parent_category_name = Input(shape=[1], name="parent_category_name")
    param_1 = Input(shape=[1], name="param_1")
    param123 = Input(shape=[1], name="param123")
    price = Input(shape=[1], name="price")
    item_seq_number = Input(shape=[1], name="item_seq_number")
    
    #Embeddings layers

    emb_seq_title_description = Embedding(vocab_size, EMBEDDING_DIM1, weights = [embedding_matrix1], trainable = True)(seq_title_description)
    emb_region = Embedding(max_region, 10)(region)
    emb_city = Embedding(max_city, 10)(city)
    emb_category_name = Embedding(max_category_name, 10)(category_name)
    emb_parent_category_name = Embedding(max_parent_category_name, 10)(parent_category_name)
    emb_param_1 = Embedding(max_param_1, 10)(param_1)
    emb_param123 = Embedding(max_param123, 10)(param123)

    rnn_layer1 = GRU(25) (emb_seq_title_description)
    
    #main layer
    main_l = concatenate([
          rnn_layer1
        , Flatten() (emb_region)
        , Flatten() (emb_city)
        , Flatten() (emb_category_name)
        , Flatten() (emb_parent_category_name)
        , Flatten() (emb_param_1)
        , Flatten() (emb_param123)
        , price
        , item_seq_number
    ])
    
    main_l = Dropout(0.1)(Dense(512,activation='relu') (main_l))
    main_l = Dropout(0.1)(Dense(64,activation='relu') (main_l))
    
    #output
    output = Dense(1,activation="sigmoid") (main_l)
    
    #model
    model = Model([seq_title_description, region, city, category_name, parent_category_name, param_1, param123, price, item_seq_number ], output)
    model.compile(optimizer='adam',
                  loss=root_mean_squared_error,
                  metrics=[root_mean_squared_error])
    return model

def rmse(y, y_pred):

    Rsum = np.sum((y - y_pred)**2)
    n = y.shape[0]
    RMSE = np.sqrt(Rsum/n)
    return RMSE 

def eval_model(model):
    val_preds = model.predict(X_valid)
    y_pred = val_preds[:, 0]
    
    y_true = np.array(y_valid_f)
    
    yt = pd.DataFrame(y_true)
    yp = pd.DataFrame(y_pred)
    
    print(yt.isnull().any())
    print(yp.isnull().any())
    
    v_rmse = rmse(y_true, y_pred)
    print(" RMSE for VALIDATION SET: "+str(v_rmse))
    return v_rmse

exp_decay = lambda init, fin, steps: (init/fin)**(1/(steps-1)) - 1

Use torch.cat to concatenate tensors.
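For example, here is a minimal sketch of what the forward pass could look like in PyTorch, covering only a few of the inputs. The class name, feature widths, and GRU handling are my assumptions, mirroring the kernel rather than quoting it:

import torch
import torch.nn as nn

class RNNModel(nn.Module):
    def __init__(self, vocab_size, embedding_dim, max_region, max_city):
        super().__init__()
        self.emb_title = nn.Embedding(vocab_size, embedding_dim)
        self.emb_region = nn.Embedding(max_region, 10)
        self.emb_city = nn.Embedding(max_city, 10)
        self.gru = nn.GRU(embedding_dim, 25, batch_first=True)
        # 25 (GRU state) + 10 + 10 (embeddings) + 2 numeric columns = 47
        self.fc1 = nn.Linear(47, 512)
        self.fc2 = nn.Linear(512, 64)
        self.out = nn.Linear(64, 1)
        self.dropout = nn.Dropout(0.1)

    def forward(self, seq_title, region, city, price, item_seq_number):
        # seq_title: (batch, seq_len) LongTensor; region/city: (batch, 1)
        # LongTensors; price/item_seq_number: (batch, 1) FloatTensors
        _, h = self.gru(self.emb_title(seq_title))  # h: (1, batch, 25)
        rnn_layer1 = h.squeeze(0)                   # (batch, 25)
        # torch.cat along dim=1 is the counterpart of Keras' concatenate([...])
        main_l = torch.cat([
            rnn_layer1,
            self.emb_region(region).flatten(1),     # Flatten()(emb_region)
            self.emb_city(city).flatten(1),         # Flatten()(emb_city)
            price,
            item_seq_number,
        ], dim=1)
        main_l = self.dropout(torch.relu(self.fc1(main_l)))
        main_l = self.dropout(torch.relu(self.fc2(main_l)))
        return torch.sigmoid(self.out(main_l))

Keras' concatenate([...]) on symbolic layer outputs becomes a plain torch.cat on tensors inside forward(), since PyTorch builds the graph eagerly as forward() runs.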


So, there is no way to actually concatenate layers (not tensors) like in the Keras API?

Sorry, I misread your question.

You could use torch.nn.Sequential to get Sequential-style behavior like in Keras, or torch.nn.ModuleList to iterate over the layers/modules.
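A small sketch of both, under assumed layer sizes (the widths and embedding cardinalities below are placeholders, not taken from the kernel):

import torch
import torch.nn as nn

# nn.Sequential plays the role of Keras' Sequential: a fixed chain of layers.
head = nn.Sequential(
    nn.Linear(47, 512),   # 47 = width of the concatenated feature vector
    nn.ReLU(),
    nn.Dropout(0.1),
    nn.Linear(512, 64),
    nn.ReLU(),
    nn.Dropout(0.1),
    nn.Linear(64, 1),
    nn.Sigmoid(),
)

# nn.ModuleList registers a list of submodules so their parameters are
# tracked, and you can loop over them in forward(); here, one embedding
# per categorical column (cardinalities are placeholders).
embeddings = nn.ModuleList([nn.Embedding(n, 10) for n in (28, 1752, 47)])

def embed_all(columns):
    # columns: one (batch, 1) LongTensor per categorical feature
    return torch.cat([emb(c).flatten(1) for emb, c in zip(embeddings, columns)], dim=1)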