Since upgrading to Gatsby V2, I've been struggling to pass this.state down to child components for use as this.props. For example, I have a container that adds data1 and data2 to this.state:
class Parent extends React.Component {
  constructor(props) {
    super(props);
    this.state = {
      data1: '',
      data2: ''
    };
  }

  componentDidMount() {
    // Loading database
    .then(doc =>
      this.setState({
        data1: doc.data().data1,
        data2: doc.data().data2
      })
    );
  }

  render() {
    const children = this.props;
    const stateAsProps = React.Children.map(children, child =>
      React.cloneElement(child, {
        data1: this.state.data1,
        data2: this.state.data2
      })
    );
    return (
      <div>{stateAsProps}</div>
    );
  }
}
and the child component is:
class Child extends Component {
  constructor(props) {
    super(props);
    this.state = {};
  }

  render() {
    return (
      <div>
        <h1>{this.props.data1}</h1>
        <p>{this.props.data2}</p>
      </div>
    );
  }
}
Finally, this is brought into a page:
const Page = () => (
  <Parent authUserID="01234" campaignID="56789">
    <Child />
  </Parent>
);
In Gatsby V1 this worked fine, but since migrating I now get the error: Uncaught Error: Objects are not valid as a React child (found: object with keys {authUserID, campaignID, children}). If you meant to render a collection of children, use an array instead.
Can anyone suggest why this happens and how to fix it?
Answer 0 (score: 1)
You are using the entire props object as the children of your component. Make sure you destructure the children object from props, and it will work as expected.
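For reference, here is a minimal sketch of the corrected render method (only the destructuring line is new; the rest is the code from the question):

  render() {
    // Pull the actual child elements out of props instead of using the whole props object
    const { children } = this.props;
    const stateAsProps = React.Children.map(children, child =>
      React.cloneElement(child, {
        data1: this.state.data1,
        data2: this.state.data2
      })
    );
    return <div>{stateAsProps}</div>;
  }

With the destructuring in place, React.Children.map iterates over the real child elements (here <Child />) rather than over the whole props object, which is exactly the object with keys {authUserID, campaignID, children} that the error message says it cannot render.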
df.registerTempTable("dfTbl")
dfNew= spark.sql("""
SELECT *, cast(time/1000 as Timestamp) as newTIMESTMP
FROM dfTbl d
""")