DataX 使用 replace 写入模式的示例配置:
{
"job":{
"setting":{
"speed": {
"channel": 1
}
},
"content":[
{
"reader":{
"name":"hdfsreader",
"parameter":{
"defaultFS":"hdfs://nameservice1",
"hadoopConfig":{
"dfs.nameservices":"nameservice1",
"dfs.ha.namenodes.nameservice1":"namenode30,namenode37",
"dfs.namenode.rpc-address.nameservice1.namenode30":"hadoop101:8020",
"dfs.namenode.rpc-address.nameservice1.namenode37":"hadoop102:8020",
"dfs.client.failover.proxy.provider.nameservice1":"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
},
"column":[
{"type":"string","index":0},
{"type":"string","index":1},
{"type":"string","index":2}
],
"fileType":"orc",
"compress": "snappy",
"encoding":"UTF-8",
"path":"/user/warehouse/dev_ads/ads_table_di/dt=${dt}/[^.]*"
}
},
"writer":{
"name": "mysqlwriter",
"parameter":
{
"writeMode": "replace",
"username": "xxx",
"password": "***",
"preSql": ["delete from table where date(create_time)='${dt_1}'"],
"column": [
"col1",
"col2",
"col3"
],
"connection": [
{
"jdbcUrl": "jdbc:mysql://xxxxxx:3306/operating-management?useUnicode=true&characterEncoding=utf8",
"table": ["om_check_order_hive"]
}
]
}
}
}
]
}
}
注意:MySQL 目标表必须有主键或 unique key,DataX 的 replace 模式根据主键/唯一索引字段来判断冲突并更新数据。