In Keras, text generation is usually implemented with a recurrent neural network (RNN) or a long short-term memory network (LSTM). Below is a simple example that generates Shakespeare-style text:
import numpy as np
from keras.models import Sequential
from keras.layers import LSTM, Dense
text = ...  # input text data: your training corpus as a single string
chars = sorted(set(text))                           # character vocabulary
char_indices = {c: i for i, c in enumerate(chars)}  # char -> index
indices_char = {i: c for i, c in enumerate(chars)}  # index -> char
maxlen = 40  # length of each input sequence
step = 3     # stride between consecutive sequences
sentences = []
next_chars = []
for i in range(0, len(text) - maxlen, step):
    sentences.append(text[i: i + maxlen])
    next_chars.append(text[i + maxlen])

# One-hot encode the input sequences and their target characters
x = np.zeros((len(sentences), maxlen, len(chars)), dtype=bool)
y = np.zeros((len(sentences), len(chars)), dtype=bool)
for i, sentence in enumerate(sentences):
    for t, char in enumerate(sentence):
        x[i, t, char_indices[char]] = 1
    y[i, char_indices[next_chars[i]]] = 1
# Single-layer LSTM followed by a softmax over the character vocabulary
model = Sequential()
model.add(LSTM(128, input_shape=(maxlen, len(chars))))
model.add(Dense(len(chars), activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(x, y, batch_size=128, epochs=60)
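The generate_text function below calls a sample helper that the original snippet does not define. A minimal sketch is given here, assuming the usual temperature-based sampling: the predicted distribution is reweighted by the temperature and one character index is drawn from it.
def sample(preds, temperature=1.0):
    # Reweight the predicted distribution by temperature and draw one index.
    # (This helper is assumed, not part of the original snippet.)
    preds = np.asarray(preds).astype('float64')
    preds = np.log(preds + 1e-8) / temperature  # small epsilon avoids log(0)
    exp_preds = np.exp(preds)
    preds = exp_preds / np.sum(exp_preds)
    probas = np.random.multinomial(1, preds, 1)
    return np.argmax(probas)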
def generate_text(model, start_string, length=400, temperature=0.5):
    # start_string should be maxlen characters long and contain only
    # characters that appear in the training vocabulary
    generated = start_string
    for i in range(length):
        # One-hot encode the current seed window
        x_pred = np.zeros((1, maxlen, len(chars)))
        for t, char in enumerate(start_string):
            x_pred[0, t, char_indices[char]] = 1.
        # Predict the next-character distribution and sample from it
        preds = model.predict(x_pred, verbose=0)[0]
        next_index = sample(preds, temperature)
        next_char = indices_char[next_index]
        generated += next_char
        # Slide the seed window forward by one character
        start_string = start_string[1:] + next_char
    return generated
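As a usage sketch (the seed choice and output length here are illustrative, not from the original):
seed = text[:maxlen]  # any maxlen-character slice of the corpus can serve as the seed
print(generate_text(model, seed, length=400, temperature=0.5))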
The above is a simple implementation of a text generation task; you can adjust and optimize it for your specific needs and data.