Bidirectional RNN - 'NoneType' object has no attribute '_inbound_nodes' error
I want to build a recurrent model that does POS (Part-Of-Speech) tagging.
It takes an input sentence and outputs a tag for each word:
DT NN VBD DT NN
the dog ate the cat
I want to use a 'bidirectional convolution' idea:
input (forward direction) -> recurrent layers -> Conv layers
input (backward direction) -> recurrent layers -> Conv layers
then concatenate the two branches
and get the output through a time-distributed Dense layer:
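(Side note, for comparison only: Keras ships a built-in L.Bidirectional wrapper that runs a recurrent layer forward and backward and concatenates the results, so a purely recurrent tagger could be sketched roughly as below. The code in this question instead wires the two directions by hand so that a Conv branch can sit on each one. The 10002 and 14 stand in for len(all_words) and len(all_tags) from the data-preparation code further down.)

# Rough comparison sketch only, not the model this question builds:
# the stock Bidirectional wrapper handles the forward/backward split itself.
ref_in = L.Input((None,), dtype='int32')
x = L.Embedding(10002, 32)(ref_in)                        # 10002 = len(all_words)
x = L.Bidirectional(L.GRU(64, return_sequences=True))(x)  # forward + backward GRU, concatenated
ref_out = L.TimeDistributed(L.Dense(14, activation='softmax'))(x)  # 14 = len(all_tags)
ref_model = keras.models.Model(ref_in, ref_out)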
Here's the main part. I wrote the code like this:
def make_model(input):
    Embeded_input = L.Embedding(len(all_words), 32, name='a')(input)  # (batch, time, 32); vocabulary size is 10002

    RNN = keras.models.Sequential()
    RNN.add(L.GRU(64, return_sequences=True, recurrent_dropout=0.3, name='b'))
    RNN.add(L.LSTM(64, return_sequences=True, recurrent_dropout=0.1, name='c'))
    forward1 = RNN(Embeded_input)
    backward1 = RNN(Embeded_input[:, ::-1, :])

    CNN = keras.models.Sequential()
    CNN.add(L.Conv1D(32, 3, padding='same', activation='relu', name='d'))
    CNN.add(L.Conv1D(32, 3, padding='same', activation='relu', name='e'))
    forward2 = CNN(forward1)
    backward2 = CNN(backward1)

    concat = L.concatenate([forward2, backward2], name='f')

    stepwise_dense = L.Dense(len(all_tags), activation='softmax', name='g')
    stepwise_dense = L.TimeDistributed(stepwise_dense, name='h')
    OUTPUT = stepwise_dense(concat)
    return OUTPUT
inputs = L.Input((None,), dtype='int32', name='input')
outputs = make_model(inputs)
model = keras.models.Model(inputs=inputs, outputs=outputs, name='POS_improved_07')
model.summary()
and I get this error from the second-to-last line:
AttributeError Traceback (most recent call last)
<ipython-input-76-cf1cead269f5> in <module>()
27 outputs = BIRNN(input)
28
---> 29 model = keras.models.Model(inputs=inputs, outputs=outputs, name='POS_improved_07')
30
31 model.summary()
8 frames
/tensorflow-1.15.2/python3.7/keras/engine/network.py in build_map(tensor, finished_nodes, nodes_in_progress, layer, node_index, tensor_index)
1391 ValueError: if a cycle is detected.
1392 """
-> 1393 node = layer._inbound_nodes[node_index]
1394
1395 # Prevent cycles.
AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
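For context — this is my reading of the traceback, not something stated in the question: in graph-building Keras (TF 1.x), every tensor passed to a layer has to itself be the output of a Keras layer, and the raw slice Embeded_input[:,::-1,:] is a plain TensorFlow op with no layer behind it, which is the classic trigger for this '_inbound_nodes' error. The usual workaround is to wrap the reversal in a Lambda layer, roughly:

# Sketch of the common workaround (assuming the slice is indeed the culprit):
# wrap the time-axis reversal in a Lambda layer so Keras can track it in the graph.
reverse_time = L.Lambda(lambda x: x[:, ::-1, :], name='reverse_time')
backward1 = RNN(reverse_time(Embeded_input))  # instead of RNN(Embeded_input[:,::-1,:])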
P.S. Here is where the input comes from:
import nltk
import numpy as np
nltk.download('brown')
nltk.download('universal_tagset')
data = nltk.corpus.brown.tagged_sents(tagset='universal')
all_tags = ['#EOS#','#UNK#','ADV', 'NOUN', 'ADP', 'PRON', 'DET', '.', 'PRT', 'VERB', 'X', 'NUM', 'CONJ', 'ADJ']
data = np.array([[(word.lower(),tag) for word,tag in sentence] for sentence in data ])
and then
from collections import Counter
word_counts = Counter()
for sentence in data:
    words, tags = zip(*sentence)
    word_counts.update(words)

all_words = ['#EOS#','#UNK#'] + list(list(zip(*word_counts.most_common(10000)))[0])  # len = 10002
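The batching code below also uses word_to_id and tag_to_id, which are not shown in the question; presumably they are the usual token-to-index dictionaries, something like:

# Assumed, not shown in the question: token -> integer id lookup tables
word_to_id = {word: i for i, word in enumerate(all_words)}
tag_to_id = {tag: i for i, tag in enumerate(all_tags)}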
along with to_matrix and generate_batches:
def to_matrix(lines, token_to_id, max_len=None, pad=0, dtype='int32', time_major=False):
    """Converts a list of token sequences into an rnn-digestible matrix with padding added after the end"""
    max_len = max_len or max(map(len, lines))
    matrix = np.empty([len(lines), max_len], dtype)
    matrix.fill(pad)

    for i in range(len(lines)):
        line_ix = list(map(token_to_id.__getitem__, lines[i]))[:max_len]
        matrix[i, :len(line_ix)] = line_ix

    return matrix.T if time_major else matrix
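As a quick illustration (my own example, not from the question), shorter lines are right-padded with the pad id:

# Illustrative only: two sentences of different length -> one padded (2, 5) int32 matrix
demo = to_matrix([['the', 'dog', 'ate', 'the', 'cat'], ['the', 'dog']], word_to_id)
print(demo.shape)  # (2, 5): the shorter sentence is padded with 0 up to length 5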
from keras.utils.np_utils import to_categorical
BATCH_SIZE=32
def generate_batches(sentences, batch_size=BATCH_SIZE, max_len=None, pad=0):
    assert isinstance(sentences, np.ndarray), "Make sure sentences is a numpy array"

    while True:
        indices = np.random.permutation(np.arange(len(sentences)))
        for start in range(0, len(indices) - 1, batch_size):
            batch_indices = indices[start:start + batch_size]
            batch_words, batch_tags = [], []
            for sent in sentences[batch_indices]:
                words, tags = zip(*sent)
                batch_words.append(words)
                batch_tags.append(tags)

            batch_words = to_matrix(batch_words, word_to_id, max_len, pad)
            batch_tags = to_matrix(batch_tags, tag_to_id, max_len, pad)

            batch_tags_1hot = to_categorical(batch_tags, len(all_tags)).reshape(batch_tags.shape + (-1,))
            yield batch_words, batch_tags_1hot
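Drawing one batch from it (with the dictionaries assumed above) then gives shapes like:

# Illustrative only: inspect the shapes of a single generated batch
batch_words, batch_tags_1hot = next(generate_batches(data))
print(batch_words.shape)      # (32, max_len)      int32 word ids
print(batch_tags_1hot.shape)  # (32, max_len, 14)  one-hot tags, 14 = len(all_tags)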
The model will then be compiled and trained like this (EvaluateAccuracy and save_in_drive are custom callbacks not shown here):
model.compile('adam', 'categorical_crossentropy')

model.fit_generator(generate_batches(train_data),
                    len(train_data) / BATCH_SIZE,
                    callbacks=[
                        EvaluateAccuracy(),
                        save_in_drive(model_filename, MODEL_SAVE_FOLDER_PATH)
                    ],
                    epochs=5)
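train_data (like model_filename and MODEL_SAVE_FOLDER_PATH) is not defined in the snippet; presumably it is just a split of data, for example something along the lines of:

# Assumed, not shown in the question: a plain train/test split of the tagged sentences
from sklearn.model_selection import train_test_split
train_data, test_data = train_test_split(data, test_size=0.25, random_state=42)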
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow