Using Payload

Specify the payload class when creating a table with Spark SQL:

create table hudi_test (
  id int,
  comb int,
  price string,
  name string,
  par string
) using hudi
options (
  primaryKey = "id",
  preCombineField = "comb",
  payloadClass = "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload"
)
partitioned by (par);

Specify the payload class when writing through the Datasource API:

import org.apache.hudi.DataSourceWriteOptions.COW_TABLE_TYPE_OPT_VAL
import org.apache.spark.sql.SaveMode.Append

// The payload class controls how an incoming record is merged with the
// record already stored for the same key during upsert.
data.write.format("hudi").
  option("hoodie.datasource.write.table.type", COW_TABLE_TYPE_OPT_VAL).
  option("hoodie.datasource.write.precombine.field", "comb").
  option("hoodie.datasource.write.recordkey.field", "id").
  option("hoodie.datasource.write.partitionpath.field", "par").
  option("hoodie.datasource.write.payload.class", "org.apache.hudi.common.model.DefaultHoodieRecordPayload").
  option("hoodie.datasource.write.keygenerator.class", "org.apache.hudi.keygen.SimpleKeyGenerator").
  option("hoodie.datasource.write.operation", "upsert").
  option("hoodie.datasource.hive_sync.enable", "true").
  option("hoodie.datasource.hive_sync.partition_fields", "par").
  option("hoodie.datasource.hive_sync.partition_extractor_class", "org.apache.hudi.hive.MultiPartKeysValueExtractor").
  option("hoodie.datasource.hive_sync.table", "hudi_test").
  option("hoodie.datasource.hive_sync.use_jdbc", "false").
  option("hoodie.upsert.shuffle.parallelism", 4).
  option("hoodie.datasource.write.hive_style_partitioning", "true").
  option("hoodie.table.name", "hudi_test").
  mode(Append).
  save("/tmp/hudi_test")
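The Datasource example assumes a DataFrame named data already exists. A minimal sketch of preparing such a DataFrame and verifying the result is shown below; the column values, the SparkSession setup, and the read-back step are illustrative assumptions, not part of the original example.

import org.apache.spark.sql.SparkSession

// Assumed SparkSession; in spark-shell or spark-sql this already exists as `spark`.
val spark = SparkSession.builder()
  .appName("hudi-payload-example")
  .getOrCreate()

import spark.implicits._

// Two rows with the same record key "id". With DefaultHoodieRecordPayload,
// the row whose preCombine field ("comb") is larger wins the upsert.
val data = Seq(
  (1, 1, "10.0", "a1", "2021"),
  (1, 2, "11.0", "a1-new", "2021")
).toDF("id", "comb", "price", "name", "par")

// After running the write shown above, read the table back to check
// which version of the record survived. Depending on the Hudi version,
// a path glob such as "/tmp/hudi_test/*" may be required instead of the base path.
val result = spark.read.format("hudi").load("/tmp/hudi_test")
result.select("id", "comb", "price", "name", "par").show()

With the configuration above, only the row (1, 2, "11.0", "a1-new", "2021") should remain, since DefaultHoodieRecordPayload keeps the record with the larger preCombine value.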