I am trying to create a chatbot in Python for a school project, following this video series: https://www.youtube.com/watch?v=wypVcNIH6D4&list=PLzMcBGfZo4-ndH9FoC4YWHGXG5RZekt-Q. When I try to run my code, I get an error. Here is the error message I receive:
Traceback (most recent call last):
  File "main.py", line 88, in <module>
    model.load(".\model.tflearn")
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\models\dnn.py", line 308, in load
    self.trainer.restore(model_file, weights_only, **optargs)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\helpers\trainer.py", line 490, in restore
    self.restorer.restore(self.session, model_file)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1686, in restore
    {self.saver_def.filename_tensor_name: save_path})
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 895, in run
    run_metadata_ptr)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1128, in _run
    feed_dict_tensor, options, run_metadata)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1344, in _do_run
    options, run_metadata)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1363, in _do_call
    raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.NotFoundError: Unsuccessful TensorSliceReader constructor: Failed to find any matching files for C:\Users\Sachu\Documents\chatbot\model.tflearn
    [[Node: save_1/RestoreV2_2 = RestoreV2[dtypes=[DT_FLOAT], _device="/job:localhost/replica:0/task:0/device:CPU:0"](_arg_save_1/Const_0_0, save_1/RestoreV2_2/tensor_names, save_1/RestoreV2_2/shape_and_slices)]]

Caused by op 'save_1/RestoreV2_2', defined at:
  File "main.py", line 80, in <module>
    model = tflearn.DNN(net)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\models\dnn.py", line 65, in __init__
    best_val_accuracy=best_val_accuracy)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\helpers\trainer.py", line 147, in __init__
    allow_empty=True)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1239, in __init__
    self.build()
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1248, in build
    self._build(self._filename, build_save=True, build_restore=True)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1284, in _build
    build_save=build_save, build_restore=build_restore)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 765, in _build_internal
    restore_sequentially, reshape)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 428, in _AddRestoreOps
    tensors = self.restore_op(filename_tensor, saveable, preferred_shard)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 268, in restore_op
    [spec.tensor.dtype])[0])
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\ops\gen_io_ops.py", line 1113, in restore_v2
    shape_and_slices=shape_and_slices, dtypes=dtypes, name=name)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 787, in _apply_op_helper
    op_def=op_def)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\ops.py", line 3160, in create_op
    op_def=op_def)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\ops.py", line 1625, in __init__
    self._traceback = self._graph._extract_stack() # pylint: disable=protected-access

NotFoundError (see above for traceback): Unsuccessful TensorSliceReader constructor: Failed to find any matching files for C:\Users\Sachu\Documents\chatbot\model.tflearn
    [[Node: save_1/RestoreV2_2 = RestoreV2[dtypes=[DT_FLOAT], _device="/job:localhost/replica:0/task:0/device:CPU:0"](_arg_save_1/Const_0_0, save_1/RestoreV2_2/tensor_names, save_1/RestoreV2_2/shape_and_slices)]]
Here is my actual code:
import nltk
from nltk.stem.lancaster import LancasterStemmer
stemmer = LancasterStemmer()

import numpy
import tflearn
import tensorflow
import random
import json
import pickle

nltk.download('punkt')

with open("intents.json") as file:
    data = json.load(file)

try:
    with open("data.pickle", "wb") as f:
        words, labels, training, output = pickle.load(f)
except:
    words = []
    labels = []
    docs_x = []
    docs_y = []

    for intent in data["intents"]:
        for pattern in intent["patterns"]:
            wrds = nltk.word_tokenize(pattern)
            words.extend(wrds)
            docs_x.append(wrds)
            docs_y.append(intent["tag"])

        if intent["tag"] not in labels:
            labels.append(intent["tag"])

    words = [stemmer.stem(w.lower()) for w in words if w != "?"]
    words = sorted(list(set(words)))

    labels = sorted(labels)

    training = []
    output = []

    out_empty = [0 for _ in range(len(labels))]

    for x, doc in enumerate(docs_x):
        bag = []

        wrds = [stemmer.stem(w.lower()) for w in doc]

        for w in words:
            if w in wrds:
                bag.append(1)
            else:
                bag.append(0)

        output_row = out_empty[:]
        output_row[labels.index(docs_y[x])] = 1

        training.append(bag)
        output.append(output_row)

    training = numpy.array(training)
    output = numpy.array(output)

    with open("data.pickle", "wb") as f:
        pickle.dump((words, labels, training, output), f)

tensorflow.reset_default_graph()

net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)

model = tflearn.DNN(net)

'''
try:
    model.load("model.tflearn")
except:
    model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
    model.save("model.tflearn")
'''

model.load(".\model.tflearn")
model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
model.save("model.tflearn")

def bag_of_words(s, words):
    bag = [0 for _ in range(len(words))]

    s_words = nltk.word_tokenize(s)
    s_words = [stemmer.stem(word.lower()) for word in s_words]

    for se in s_words:
        for i, w in enumerate(words):
            if w == se:
                bag[i] = 1

    return numpy.array(bag)

def chat():
    print("Start talking with the bot (type quit to stop)!")
    while True:
        inp = input("You: ")
        if inp.lower() == "quit":
            break

        results = model.predict([bag_of_words(inp, words)])
        results_index = numpy.argmax(results)
        tag = labels[results_index]

        for tg in data["intents"]:
            if tg['tag'] == tag:
                responses = tg['responses']

        print(random.choice(responses))

chat()
Please help me. Thanks in advance!