You can look up the underlying theory online; here we only walk through a small example that shows the idea: given a sentence, use a Bi-LSTM to predict the next word. (The code below uses the TensorFlow 1.x API.)

import tensorflow as tf
import numpy as np
sentence = 'Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam quis nostrud exercitation'
# The word 'UNK' serves as the padding placeholder
words = ['UNK'] + list(set(sentence.split()))
# Build the mappings between words and indices
word2idx = {v: k for k, v in enumerate(words)}
idx2word = {k: v for k, v in enumerate(words)}
V = len(words)  # vocabulary size
step = len(sentence.split())  # use the sentence length as the input sequence length
hidden = 50  # hidden layer size
dim = 50  # word embedding dimension
def make_batch(sentence):
    input_batch, target_batch = [], []
    words = sentence.split()
    for i, word in enumerate(words[:-1]):
        # Convert each prefix of the sentence into a list of word indices as the input
        input = [word2idx[word] for word in words[:i+1]]
        # Pad inputs shorter than `step` with the UNK index
        input += [0] * (step - len(input))
        # The word following the prefix serves as the label
        target = word2idx[words[i+1]]
        input_batch.append(input)
        # One-hot encode the target; softmax_cross_entropy_with_logits_v2 will need it later
        target_batch.append(np.eye(V)[target])
    return input_batch, target_batch
tf.reset_default_graph()
# Initialize the word embedding matrix
embedding = tf.get_variable(name="embedding", shape=[V, dim], initializer=tf.random_normal_initializer)
X = tf.placeholder(tf.int32, [None, step])
# Look up the embeddings for the input indices
XX = tf.nn.embedding_lookup(embedding, X)
Y = tf.placeholder(tf.float32, [None, V])  # one-hot labels, float32 to match the logits
# Forward LSTM
lstm_fw_cell = tf.nn.rnn_cell.LSTMCell(hidden)
# Backward LSTM
lstm_bw_cell = tf.nn.rnn_cell.LSTMCell(hidden)
# Hidden-layer computation
# outputs : (fw=[batch_size, step, hidden], bw=[batch_size, step, hidden])
# states  : (fw=(c=[batch_size, hidden], h=[batch_size, hidden]), bw=(c=[batch_size, hidden], h=[batch_size, hidden]))
outputs, states = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, XX, dtype=tf.float32)
# Concatenate the final forward and backward output vectors
outputs = tf.concat([outputs[0], outputs[1]], 2)  # [batch_size, step, 2*hidden]
outputs = tf.transpose(outputs, [1, 0, 2])[-1]    # take the last time step: [batch_size, 2*hidden]
# Weight and bias connecting the hidden layer to the classifier
W = tf.Variable(tf.random_normal([2*hidden, V]))
b = tf.Variable(tf.random_normal([V]))
# Class scores
logits = tf.matmul(outputs, W) + b  # [batch_size, V]
# Compute the loss and optimize
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(0.001).minimize(cost)
# Prediction
prediction = tf.argmax(logits, 1)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
# Generate the inputs and labels
input_batch, target_batch = make_batch(sentence)
# Train the model
for epoch in range(3000):
    _, loss = sess.run([optimizer, cost], feed_dict={X: input_batch, Y: target_batch})
    if (epoch+1) % 500 == 0:
        print("epoch=", '%04d' % (epoch+1), " loss=", '%04f' % (loss))
# Predict
predict = sess.run([prediction], feed_dict={X: input_batch})
for i, idxs in enumerate(input_batch):
    print(" ".join([idx2word[idx] for idx in idxs if idx != 0]), ' Predicted word:', idx2word[predict[0][i]], ' Actual word:', idx2word[np.argmax(target_batch[i])])

Printed results:

epoch= 0500  loss= 0.001467
epoch= 1000  loss= 0.000409
epoch= 1500  loss= 0.000199
epoch= 2000  loss= 0.000116
epoch= 2500  loss= 0.000074
epoch= 3000  loss= 0.000050
Lorem  Predicted word: ipsum  Actual word: ipsum
Lorem ipsum  Predicted word: dolor  Actual word: dolor
Lorem ipsum dolor  Predicted word: sit  Actual word: sit
Lorem ipsum dolor sit  Predicted word: amet  Actual word: amet
Lorem ipsum dolor sit amet  Predicted word: consectetur  Actual word: consectetur
Lorem ipsum dolor sit amet consectetur  Predicted word: adipisicing  Actual word: adipisicing
Lorem ipsum dolor sit amet consectetur adipisicing  Predicted word: elit  Actual word: elit
Lorem ipsum dolor sit amet consectetur adipisicing elit  Predicted word: sed  Actual word: sed
Lorem ipsum dolor sit amet consectetur adipisicing elit sed  Predicted word: do  Actual word: do
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do  Predicted word: eiusmod  Actual word: eiusmod
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod  Predicted word: tempor  Actual word: tempor
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor  Predicted word: incididunt  Actual word: incididunt
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt  Predicted word: ut  Actual word: ut
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut  Predicted word: labore  Actual word: labore
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore  Predicted word: et  Actual word: et
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et  Predicted word: dolore  Actual word: dolore
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore  Predicted word: magna  Actual word: magna
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna  Predicted word: aliqua  Actual word: aliqua
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua  Predicted word: Ut  Actual word: Ut
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut  Predicted word: enim  Actual word: enim
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim  Predicted word: ad  Actual word: ad
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad  Predicted word: minim  Actual word: minim
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim  Predicted word: veniam  Actual word: veniam
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam  Predicted word: quis  Actual word: quis
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam quis  Predicted word: nostrud  Actual word: nostrud
Lorem ipsum dolor sit amet consectetur adipisicing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam quis nostrud  Predicted word: exercitation  Actual word: exercitation
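To query the trained model with a prefix other than the training batch, you can pad the prefix's word indices to length `step` and run them through the same `prediction` op. The snippet below is a minimal sketch of that idea and is not part of the original script; the `prefix` string is just a hypothetical example, and any word missing from the vocabulary falls back to 'UNK':

# Minimal sketch: predict the next word for an arbitrary prefix (assumes the session above is still open)
prefix = 'Lorem ipsum dolor'  # hypothetical example prefix
ids = [word2idx.get(w, word2idx['UNK']) for w in prefix.split()]
ids += [0] * (step - len(ids))  # pad with the UNK index so the shape matches [None, step]
next_idx = sess.run(prediction, feed_dict={X: [ids]})[0]
print(prefix, '-> predicted next word:', idx2word[next_idx])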