I am following the NMT tutorial (https://github.com/tensorflow/tensorflow/blob/r1.13/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb) and applying it to my own use case. Unfortunately, when I try to plot the attention weights, I run into an alignment problem on the x-axis as soon as the input gets too long (e.g. 14 elements instead of 7).
In this code block the plot comes out as expected:
import numpy as np
from matplotlib import pyplot as plt
def plot_attention():
    attention = np.array([[7.78877574e-10, 4.04739769e-10, 6.65854022e-05, 1.63362725e-04,
                           2.85054208e-04, 8.50252633e-04, 4.58042100e-02],
                          [9.23501700e-02, 5.69618285e-01, 1.80586591e-01, 9.78111699e-02,
                           2.71992851e-02, 9.59911197e-03, 2.54837354e-03]])
    sentence = ['<start>', 'hace', 'mucho', 'frio', 'aqui', '.', '<end>']
    predicted_sentence = ['it', 's']

    fig = plt.figure(figsize=(10, 10))
    ax = fig.add_subplot(1, 1, 1)
    ax.matshow(attention, cmap='viridis')

    fontdict = {'fontsize': 14}
    ax.set_xticklabels([''] + sentence, fontdict=fontdict, rotation=90)
    ax.set_yticklabels([''] + predicted_sentence, fontdict=fontdict)
    plt.show()
plot_attention()
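From inspecting the tick positions (my own diagnostic, not part of the tutorial), my understanding is that set_xticklabels only attaches labels to the major ticks that already exist, which would explain why the 7-column case lines up more or less by coincidence:

# Diagnostic sketch (assumption: labels are attached only to existing major ticks)
fig, ax = plt.subplots()
ax.matshow(np.zeros((2, 7)), cmap='viridis')
print(ax.get_xticks())   # with 7 columns: roughly one tick per column

fig, ax = plt.subplots()
ax.matshow(np.zeros((2, 14)), cmap='viridis')
print(ax.get_xticks())   # with 14 columns: fewer ticks than columns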
But with more elements in the `sentence` list, the labels seem to be misaligned:
def plot_attention():
    attention = np.array([[7.78877574e-10, 4.04739769e-10, 6.65854022e-05, 1.63362725e-04,
                           2.85054208e-04, 8.50252633e-04, 4.58042100e-02, 7.78877574e-10,
                           4.04739769e-10, 6.65854022e-05, 1.63362725e-04, 2.85054208e-04,
                           8.50252633e-04, 4.58042100e-02],
                          [9.23501700e-02, 5.69618285e-01, 1.80586591e-01, 9.78111699e-02,
                           2.71992851e-02, 9.59911197e-03, 2.54837354e-03, 7.78877574e-10,
                           4.04739769e-10, 6.65854022e-05, 1.63362725e-04, 2.85054208e-04,
                           8.50252633e-04, 4.58042100e-02]])
    sentence = ['<start>', 'hace', 'mucho', 'frio', 'aqui', '.', '<end>',
                '<start>', 'hace', 'mucho', 'frio', 'aqui', '.', '<end>']
    predicted_sentence = ['it', 's']

    fig = plt.figure(figsize=(20, 10))
    ax = fig.add_subplot(1, 1, 1)
    ax.matshow(attention, cmap='viridis')

    fontdict = {'fontsize': 14}
    ax.set_xticklabels([''] + sentence, fontdict=fontdict, rotation=90)
    ax.set_yticklabels([''] + predicted_sentence, fontdict=fontdict)
    plt.show()
plot_attention()
I expect the x-axis to be perfectly aligned and every element of the x-axis to be displayed (not only every second one, as is currently the case).
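For what it's worth, I would expect something like the following to work, forcing one major tick per column/row before assigning the labels. This is only my guess based on matplotlib's ticker module, not something the linked r1.13 notebook does, and the random attention matrix is just a stand-in for the real weights:

import numpy as np
from matplotlib import pyplot as plt, ticker

# Sketch of a possible fix (assumption: labels attach only to existing major
# ticks, so pinning one tick per column/row should keep them from drifting).
attention = np.random.rand(2, 14)   # stand-in for the real attention weights
sentence = ['<start>', 'hace', 'mucho', 'frio', 'aqui', '.', '<end>'] * 2
predicted_sentence = ['it', 's']

fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(1, 1, 1)
ax.matshow(attention, cmap='viridis')
ax.xaxis.set_major_locator(ticker.MultipleLocator(1))   # one tick per column
ax.yaxis.set_major_locator(ticker.MultipleLocator(1))   # one tick per row
fontdict = {'fontsize': 14}
ax.set_xticklabels([''] + sentence, fontdict=fontdict, rotation=90)
ax.set_yticklabels([''] + predicted_sentence, fontdict=fontdict)
plt.show()

Is this the right approach, or is there a cleaner way to keep the labels aligned for arbitrary input lengths?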