以下代码收到警告:
Warning: Filter is changing an uncontrolled input of type undefined to be controlled. Input elements should not switch from uncontrolled to controlled (or vice versa). Decide between using a controlled or uncontrolled input element for the lifetime of the component.
Filter.js:
import React from 'react';
import { connect } from 'react-redux';
class Filter extends React.Component {
constructor(props) {
super();
}
setFilter = (event) => {
let v = event.target.value
if (v.length === 0) v = null;
this.props.dispatch({
type: 'SET_FILTER',
filter: v
});
}
render() {
return (
<p>
<input id='filter' className='form-control' onChange={this.setFilter} value={this.props.state.filter} />
</p>
);
}
}
// Expose the entire redux store state to the component as `props.state`.
const mapStateToProps = (state, ownProps) => ({ state });
// Hand the raw store dispatch function straight through as `props.dispatch`.
const mapDispatchToProps = (dispatch, ownProps) => ({ dispatch });
// Bind the component to the redux store and export the connected version.
Filter = connect(mapStateToProps, mapDispatchToProps)(Filter)
export default Filter
我很难看出这段代码与文档中的示例有何不同，除了我使用的是 redux 状态。
<input type="text" value={this.state.value} onChange={this.handleChange} />
但我可能会遗漏一些东西......
答案 0（得分: 1）:
不要将 value 设置为 undefined 或 null——这会让 React 认为该输入是不受控制的（uncontrolled）。如果您想在 state 中保留 null，可以在传给输入时用空字符串 "" 兜底:
<input id='filter'
className='form-control'
onChange={this.setFilter}
value={this.props.state.filter || ""} // note the || ""
/>
而不是直接传 null。（注：以下 Spark 代码疑为抓取时从另一问题混入，与本问题无关。）
sc = SparkContext(appName="Document Similarity")
# NOTE(review): this Spark snippet is unrelated to the React question above
# (it appears to have been merged in from another page). Indentation, which
# the scrape destroyed, is restored below so the code is valid Python.
lines = sc.wholeTextFiles(sys.argv[1])
# Split each whole-file text on spaces into individual tokens.
articles = lines.flatMap(lambda x: re.split(r' ', x[1]))
shingles = articles.flatMap(shingle_pairs.get_pairs)
# Group shingles per document, then build one signature vector per group.
sig_vecs = shingles.groupBy(lambda x: x[1]) \
    .map(lambda x: sig_vector.create_vector(x, a, b, n, p))

centroids = k_means.init_centroids(sig_size, k)

for i in range(max_it):
    # assign documents to closest cluster
    docs = sig_vecs.map(lambda x: k_means.classify_docs(x, centroids))
    # get count by key to use in mean calculation for new clusters
    doc_count = docs.countByKey()
    # recompute cluster centroids
    reduced_docs = docs.reduceByKey(k_means.reducer)
    # Bug fix (SPARK-5063, see the quoted error): `centroids` was left as an
    # RDD and then referenced inside `sig_vecs.map(...)` on the next
    # iteration — RDDs cannot be used inside transformations. Collect the
    # new centroids to the driver so they are shipped as a plain list.
    centroids = reduced_docs.map(lambda x: k_means.mapper(x, doc_count)) \
        .collect()
那么只需将空字符串作为该 prop 传递给输入即可，这样就能避免该警告。（注：下面的序列化错误信息属于上述混入的 Spark 问题:）
pickle.PicklingError: Could not serialize object: Exception:
It appears that you are attempting to broadcast an RDD or reference an
RDD from an action or transformation. RDD transformations and actions
can only be invoked by the driver, not inside of other transformations;
for example, rdd1.map(lambda x: rdd2.values.count() * x) is invalid
because the values transformation and count action cannot be performed
inside of the rdd1.map transformation. For more information, see SPARK-5063.