- 说明：datax不能直接将数据从hive同步到mysql，所以改为从hdfs同步到mysql。（注意：JSON不支持注释，提交给datax执行前需删除本行。）
{
"job": {
"setting": {
"speed": {
"channel": 3
}
},
"content": [
{
"reader": {
"name": "hdfsreader",
"parameter": {
"path": "/data/nshop/ads/actlog/ads_nshop_actlog_launch_gsets/bdp_day=20230711/000000_0",
"defaultFS": "hdfs://caiji:8020",
"column": [
{ "index": 0, "type": "string" },
{ "index": 1, "type": "string" },
{ "index": 2, "type": "string" },
{ "index": 3, "type": "string" },
{ "index": 4, "type": "string" },
{ "index": 5, "type": "long" },
{ "index": 6, "type": "long" },
{ "value": "20230711", "type": "string" }
],
"fileType": "text",
"encoding": "UTF-8",
"filedDelimiter": ","
}
},
"writer": {
"name": "mysqlwriter",
"parameter": {
"writeMode": "insert",
"username": "root",
"password": "123456",
"column": [
"os",
"manufacturer",
"carrier",
"network_type",
"area_code",
"user_count",
"launch_count",
"bdp_day"
],
"connection": [
{
"jdbcUrl": "jdbc:mysql://192.168.184.129:3306/ads_nshop",
"table": ["ads_nshop_actlog_launch_gsets"]
}
]
}
}
}
]
}
}