我试图通过从ViewModel观察LiveData来更新EditText中的值。
这是我要更新其EditText值的片段:
// Holds the latest question text; seeded with a placeholder until LiveData emits.
private var qnaQuestionData: String = "TEST VALUE"
// Lazily resolves the ViewModel scoped to this Fragment.
// NOTE(review): scoping to `this` (the Fragment) means an Activity-scoped producer
// writing to the same ViewModel class would update a DIFFERENT instance — confirm intent.
private val communicationViewModel by lazy {
ViewModelProvider(this).get(
MyProfileEditSharedViewModel::class.java
)
}
// Observes the shared question LiveData and mirrors every emission into the EditText.
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
// Fix: setText must happen INSIDE the observer. The observer callback is
// delivered asynchronously, so calling setText immediately after observe()
// only ever displays the initial "TEST VALUE" placeholder.
communicationViewModel.question.observe(viewLifecycleOwner, Observer { data ->
data.let { qnaQuestionData = it }
et_question.setText(qnaQuestionData)
})
}
这是我的ViewModel:
// Shared ViewModel exposing the question text as an observable LiveData stream.
class MyProfileEditSharedViewModel : ViewModel() {
// Mutable so producers can post new questions; observers receive each update.
val question = MutableLiveData<String>()
}
当我运行此代码时，我的 EditText 的值被设置为初始占位值 "TEST VALUE"，这意味着 LiveData 的更新从未被观察到。我该如何解决？
谢谢!
答案 0 :(得分:1)
将通话et_question.setText(qnaQuestionData)
移至观察者:
// Update the EditText from inside the observer so every LiveData emission
// is reflected in the UI, not just the initial placeholder value.
communicationViewModel.question.observe(viewLifecycleOwner, Observer { data ->
qnaQuestionData = data
et_question.setText(qnaQuestionData)
})
et_question.setText(qnaQuestionData)
答案 1 :(得分:1)
我将您的MyProfileEditSharedViewModel更改为这样
// Shared ViewModel holding the question text that fragments observe.
class MyProfileEditSharedViewModel : ViewModel() {
// Eagerly-created mutable stream; producers post, observers react.
val question = MutableLiveData<String>()
}
您的片段应该是这样
// Fragment that mirrors the shared question LiveData into its EditText.
class someFragment(): Fragment() {
// Placeholder shown until the first LiveData emission arrives.
private var qnaQuestionData: String = "TEST VALUE"
// Activity-scoped ViewModel so sibling fragments share the same instance.
// Fixes: resolve once via `by lazy` instead of a computed `get()` that
// re-runs the provider lookup on every access; use requireActivity()
// instead of activity!!; use the non-deprecated ViewModelProvider API
// (ViewModelProviders.of is deprecated).
private val communicationViewModel by lazy {
ViewModelProvider(requireActivity()).get(MyProfileEditSharedViewModel::class.java)
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
et_question.setText(qnaQuestionData)
communicationViewModel.question.postValue("This Message will Show") //<------- for testing
// Fix: observe with viewLifecycleOwner (not `this`) so the observer is
// removed when the fragment's VIEW is destroyed, avoiding stale-view access.
communicationViewModel.question.observe(viewLifecycleOwner, Observer {
qnaQuestionData = it
et_question.setText(qnaQuestionData)
})
}
}
答案 2 :(得分:0)
更改
// Resolve the shared ViewModel from the host Activity's scope so the
// producing and consuming fragments observe the SAME instance.
private val communicationViewModel by lazy {
ViewModelProvider(requireActivity()).get(
MyProfileEditSharedViewModel::class.java
)
}
即将 ViewModelProvider 的作用域参数从 this 更改为 requireActivity()，就解决了这个问题。官方文档也建议：当多个 Fragment 需要共享同一个 ViewModel 实例时，应使用 requireActivity() 作为作用域。
# Graph mode is required for the tf.compat.v1 placeholder/session API below.
tf.compat.v1.disable_eager_execution()

class RLLSTM():
    """Minimal TF1-style model that manually unrolls an LSTM cell over time.

    Builds a placeholder of shape (None, timestep, feature_dim), unstacks it
    along the time axis, and applies the cell once per step, threading the
    state through. `self.output` / `self.state_in` end up holding the last
    step's output and state.
    """

    def __init__(self, timestep, feature_dim=5, batch_size=32, hidden_units=4):
        super(RLLSTM, self).__init__()
        # Fix: the shape constants were hard-coded (4, 5, 32, 4); they are now
        # parameters whose defaults reproduce the original graph exactly.
        self.input = tf.compat.v1.placeholder(
            shape=[None, timestep, feature_dim], dtype=tf.float32)
        # Fix: tf.contrib was removed in TF 2.x; the compat alias provides the
        # same BasicLSTMCell. state_is_tuple=False packs (c, h) into one tensor.
        self.lstm = tf.compat.v1.nn.rnn_cell.BasicLSTMCell(
            hidden_units, state_is_tuple=False)
        self.state_in = self.lstm.zero_state(batch_size, tf.float32)
        # Manual unroll: one cell application per time step, carrying state.
        self.x = tf.unstack(self.input, axis=1)
        for t in self.x:
            self.output, self.state_in = self.lstm(t, self.state_in)
def train():
    """Runs one forward pass of RLLSTM on a zero batch and prints the result."""
    # Fix: build the graph BEFORE creating the variables initializer and the
    # session. In the original ordering, global_variables_initializer() was
    # created before RLLSTM's variables existed, so sess.run(init) initialized
    # nothing and the forward pass failed with FailedPreconditionError.
    lstm = RLLSTM(4)
    x = np.zeros(shape=[32, 4, 5], dtype=np.float32)
    # Use the compat alias consistently with the rest of the file (TF2-safe).
    init = tf.compat.v1.global_variables_initializer()
    with tf.compat.v1.Session() as sess:
        sess.run(init)
        outputs = sess.run([lstm.output, lstm.state_in],
                           feed_dict={lstm.input: x})
        print(outputs)

if __name__ == '__main__':
    train()