我需要将两个数据帧连接在一起,以便在存在时添加列数据,并且它的行为不符合我的预期。
DFA:
# +---+-----+-----+
# | id|d_var|d_val|
# +---+-----+-----+
# |a01| 112| null|
# |a01| 113| 0|
# |a02| 112| null|
# |a02| 113| 0|
# +---+-----+-----+
DFB:
# +---+-----+-----+------+-----+
# | id|d_var|d_val|c_type|c_val|
# +---+-----+-----+------+-----+
# |a01| 112| null| red| 1|
# |a01| 113| 0| red| 1|
# +---+-----+-----+------+-----+
以下是意外行为的数据框创建和加入调用:
# Build the two demo frames. Note d_val is None for the '112' rows on BOTH
# sides — that is what triggers the surprising join behavior below.
dfA = spark.createDataFrame(
    [
        ('a01', '112', None),
        ('a01', '113', '0'),
        ('a02', '112', None),
        ('a02', '113', '0')
    ],
    ('id', 'd_var', 'd_val')
)
dfB = spark.createDataFrame(
    [
        ('a01', '112', None, 'red', '1'),
        ('a01', '113', '0', 'red', '1')
    ],
    ('id', 'd_var', 'd_val', 'c_type', 'c_val')
)
# First three columns of dfB ('id', 'd_var', 'd_val') are the join keys.
static_cols = dfB.columns[:3]
# Joining on a list of column names uses plain `=` equality. In SQL semantics
# NULL = NULL evaluates to NULL (not true), so the ('a01', '112', null) rows
# never match each other — that is why c_type/c_val come back null for them.
# A null-safe comparison (Column.eqNullSafe / SQL `<=>`) is needed instead.
dfA.join(dfB, static_cols, how='left').orderBy('id', 'd_var').show()
输出:
# +---+-----+-----+------+-----+
# | id|d_var|d_val|c_type|c_val|
# +---+-----+-----+------+-----+
# |a01| 112| null| null| null| <-
# |a01| 113| 0| red| 1|
# |a02| 112| null| null| null|
# |a02| 113| 0| null| null|
# +---+-----+-----+------+-----+
预期(和期望)输出:
# +---+-----+-----+------+-----+
# | id|d_var|d_val|c_type|c_val|
# +---+-----+-----+------+-----+
# |a01| 112| null| red| 1| <-
# |a01| 113| 0| red| 1|
# |a02| 112| null| null| null|
# |a02| 113| 0| null| null|
# +---+-----+-----+------+-----+
答案 0（得分：0）
（张贴我自己的答案，应与 @Shaido 的补充一并参考）
# Build a null-safe equality condition over the shared key columns, so rows
# where d_val is NULL on both sides still match (plain `=` yields NULL for
# NULL = NULL and silently drops those rows from the equi-join).
# Generalized: fold eqNullSafe over the key-column list instead of hardcoding
# the three comparisons, so the same code works for any set of key columns.
from functools import reduce

key_cols = dfB.columns[:3]  # ('id', 'd_var', 'd_val')
cond = reduce(
    lambda acc, c: acc & dfA[c].eqNullSafe(dfB[c]),
    key_cols[1:],
    dfA[key_cols[0]].eqNullSafe(dfB[key_cols[0]]),
)
# Select dfA's copy of each key column (plus dfB's extra columns) to avoid
# ambiguous duplicate column names in the joined result.
dfA.join(dfB, cond, how='left').select(
    dfA.id, dfA.d_var, dfA.d_val, dfB.c_type, dfB.c_val
).orderBy('id', 'd_var').show()