1111 Online Map (30 分) (cj)

This post shows how an online map can recommend both the shortest and the fastest path from the current position to a destination. Dijkstra's algorithm is used to find the best path under each tie-breaking rule, and the implementation is explained in detail.

Input our current position and a destination, an online map can recommend several paths. Now your job is to recommend two paths to your user: one is the shortest, and the other is the fastest. It is guaranteed that a path exists for any request.

Input Specification:

Each input file contains one test case. For each case, the first line gives two positive integers N (2≤N≤500) and M, being the total number of street intersections on a map and the number of streets, respectively. Then M lines follow, each describing a street in the format:

V1 V2 one-way length time

where V1 and V2 are the indices (from 0 to N−1) of the two ends of the street; one-way is 1 if the street is one-way from V1 to V2, or 0 if not; length is the length of the street; and time is the time taken to pass the street.

Finally a pair of source and destination is given.

Output Specification:

For each case, first print the shortest path from the source to the destination with distance D in the format:

Distance = D: source -> v1 -> ... -> destination

Then in the next line print the fastest path with total time T:

Time = T: source -> w1 -> ... -> destination

In case the shortest path is not unique, output the fastest one among the shortest paths, which is guaranteed to be unique. In case the fastest path is not unique, output the one that passes through the fewest intersections, which is guaranteed to be unique.
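
Both tie-breaking rules are plain lexicographic comparisons: the shortest pass orders candidates by (distance, time), the fastest pass by (time, number of intersections). A minimal sketch of that comparison (Cost and shouldRelax are illustrative names, not anything the problem prescribes):

#include <cstdio>
#include <utility>

// Shortest pass compares {distance, time}; fastest pass compares {time, #intersections}.
// std::pair's operator< is exactly the lexicographic order both rules describe.
typedef std::pair<int, int> Cost;

bool shouldRelax(Cost candidate, Cost current) {
	return candidate < current;   // smaller primary key, or equal primary and smaller secondary
}

int main() {
	std::printf("%d\n", shouldRelax(Cost(6, 3), Cost(6, 5)));   // same distance, less time -> 1
	std::printf("%d\n", shouldRelax(Cost(7, 1), Cost(6, 5)));   // greater distance -> 0
	return 0;
}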

In case the shortest and the fastest paths are identical, print them in one line in the format:

Distance = D; Time = T: source -> u1 -> ... -> destination

Sample Input 1:

10 15
0 1 0 1 1
8 0 0 1 1
4 8 1 1 1
3 4 0 3 2
3 9 1 4 1
0 6 0 1 1
7 5 1 2 1
8 5 1 2 1
2 3 0 2 2
2 1 1 1 1
1 3 0 3 1
1 4 0 1 1
9 7 1 3 1
5 1 0 5 2
6 5 1 1 2
3 5

Sample Output 1:

Distance = 6: 3 -> 4 -> 8 -> 5
Time = 3: 3 -> 1 -> 5

Sample Input 2:

7 9
0 4 1 1 1
1 6 1 1 3
2 6 1 1 1
2 5 1 2 2
3 0 0 1 1
3 1 1 1 3
3 2 1 1 2
4 5 0 2 2
6 5 1 1 2
3 5

Sample Output 2:

Distance = 3; Time = 4: 3 -> 2 -> 5

Despair... DFS would blow the time limit, BFS would blow the memory limit, and Floyd's all-pairs shortest paths makes recovering the actual path awkward... so Dijkstra's single-source shortest path is the only option left.

The key points are how Dijkstra reconstructs the path, how to store the number of intersections along a path, and Dijkstra's relaxation (tie-breaking) conditions.
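
All three hang off one predecessor array: record the predecessor whenever a vertex is relaxed, store the intersection count as the predecessor's count plus one, and walk the predecessors backwards at the end. A minimal sketch of that walk, assuming a pre[] array where -1 marks the end of the chain (the full solution below does the same with path_o/path_w, appending the source manually):

#include <algorithm>
#include <cstdio>
#include <vector>

// Walk predecessors from the destination back towards the source, then reverse,
// so the result runs source -> destination. pre[v] == -1 ends the chain.
std::vector<int> recoverPath(const std::vector<int>& pre, int destination) {
	std::vector<int> path;
	for (int v = destination; v != -1; v = pre[v])
		path.push_back(v);
	std::reverse(path.begin(), path.end());
	return path;
}

int main() {
	const int chain[4] = {-1, 0, 1, 2};          // toy chain 0 -> 1 -> 2 -> 3
	std::vector<int> pre(chain, chain + 4);
	std::vector<int> path = recoverPath(pre, 3);
	for (size_t i = 0; i < path.size(); ++i)
		std::printf(i ? " -> %d" : "%d", path[i]);
	std::printf("\n");                           // prints 0 -> 1 -> 2 -> 3
	return 0;
}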

code

#pragma warning(disable:4996)
#include <iostream>
#include <vector>
#define inf 0x7fffffff
using namespace std;
int map[505][505][2];		// map[u][v][0] = length of edge u->v, map[u][v][1] = its time
int distancepath[505][2];	// pass 1: [0] = distance from source, [1] = time along that path
int timepath[505][2];		// pass 2: [0] = time from source, [1] = number of intersections passed
int path_w[505];			// predecessor of each vertex on the fastest path
int path_o[505];			// predecessor of each vertex on the shortest path
bool vis[505];
int n, m;
// Pass 1: Dijkstra minimizing distance, breaking ties by total time.
void Dijkstra(int pos) {
	for (int i = 0; i < n; ++i) {
		distancepath[i][0] = map[pos][i][0];
		distancepath[i][1] = map[pos][i][1];
	}
	vis[pos] = 1;
	while (1) {
		int minpos = -1, min_d = inf, min_t = inf;
		for (int i = 0; i < n; ++i) {	// scan every vertex, including 0
			if (vis[i] == 0 && (distancepath[i][0] < min_d ||
				(distancepath[i][0] == min_d &&
				 distancepath[i][1] < min_t))) {
				minpos = i;
				min_d = distancepath[i][0];
				min_t = distancepath[i][1];
			}
		}
		if (minpos == -1) return;	// nothing reachable is left
		vis[minpos] = 1;
		for (int i = 0; i < n; ++i) {
			if (vis[i] || map[minpos][i][0] == inf) continue;	// the inf check prevents overflow
			if (distancepath[i][0] > min_d + map[minpos][i][0] ||
				(distancepath[i][0] == min_d + map[minpos][i][0] &&
				 distancepath[i][1] > min_t + map[minpos][i][1])) {
				distancepath[i][0] = min_d + map[minpos][i][0];
				distancepath[i][1] = min_t + map[minpos][i][1];
				path_o[i] = minpos;
			}
		}
	}
}
// Pass 2: Dijkstra minimizing time, breaking ties by the number of intersections.
void Dijkstra_w(int pos) {
	for (int i = 0; i < n; ++i) {
		timepath[i][0] = map[pos][i][1];
		timepath[i][1] = 0;
	}
	vis[pos] = 1;
	while (1) {
		int minpos = -1, min_t = inf;
		for (int i = 0; i < n; ++i) {	// scan every vertex, including 0
			if (vis[i] == 0 && timepath[i][0] < min_t) {
				minpos = i;
				min_t = timepath[i][0];
			}
		}
		if (minpos == -1) return;	// nothing reachable is left
		vis[minpos] = 1;
		for (int i = 0; i < n; ++i) {
			if (vis[i] || map[minpos][i][1] == inf) continue;	// the inf check prevents overflow
			if (timepath[i][0] > min_t + map[minpos][i][1] ||
				(timepath[i][0] == min_t + map[minpos][i][1] &&
				 timepath[i][1] > timepath[minpos][1] + 1)) {
				timepath[i][0] = min_t + map[minpos][i][1];
				timepath[i][1] = timepath[minpos][1] + 1;
				path_w[i] = minpos;
			}
		}
	}
}
// Reset the adjacency matrix (inf = no edge) and both predecessor arrays.
void init() {
	for (int i = 0; i < 505; ++i) {
		path_o[i] = -1;
		path_w[i] = -1;
	}
	for (int i = 0; i < 505; ++i) {
		for (int j = 0; j < 505; ++j) {
			map[i][j][0] = inf;
			map[i][j][1] = inf;
		}
		map[i][i][0] = 0;
		map[i][i][1] = 0;
	}
}
// Reset vis[] so the second Dijkstra pass starts fresh.
void smallinit() {
	for (int i = 0; i < 505; ++i) {
		vis[i] = 0;
	}
}
int main() {
	init();
	cin >> n >> m;
	int v1, v2, one_way, length, time;
	for (int i = 0; i < m; ++i) {
		cin >> v1 >> v2 >> one_way >> length >> time;
		map[v1][v2][0] = length;
		map[v1][v2][1] = time;
		if (!one_way) {		// two-way street: add the reverse edge as well
			map[v2][v1][0] = length;
			map[v2][v1][1] = time;
		}
	}
	int source, destination;
	cin >> source >> destination;
	Dijkstra(source);						// pass 1: shortest distance, tie-break on time
	int p = destination;
	int min_d = distancepath[destination][0];
	vector<int> tmp_o, tmp_w;
	while (p != -1) {						// walk predecessors back from the destination;
		tmp_o.push_back(p);					// -1 marks a vertex reached directly from the
		p = path_o[p];						// source, so the source is appended afterwards
	}
	tmp_o.push_back(source);
	smallinit();
	Dijkstra_w(source);						// pass 2: fastest time, tie-break on #intersections
	p = destination;
	int min_t = timepath[destination][0];
	while (p != -1) {
		tmp_w.push_back(p);
		p = path_w[p];
	}
	tmp_w.push_back(source);
	if (tmp_o == tmp_w) {					// identical paths: print one combined line
		cout << "Distance = " << min_d << "; ";
	}
	else {
		cout << "Distance = " << min_d << ": ";
		for (int i = (int)tmp_o.size() - 1; i >= 0; --i) {
			if (i != (int)tmp_o.size() - 1) cout << " -> ";
			cout << tmp_o[i];
		}
		cout << endl;
	}
	cout << "Time = " << min_t << ": ";
	for (int i = (int)tmp_w.size() - 1; i >= 0; --i) {
		if (i != (int)tmp_w.size() - 1) cout << " -> ";
		cout << tmp_w[i];
	}
	cout << endl;
	system("pause");	// keeps the console window open when run from Visual Studio
	return 0;
}
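
For a quick sanity check, compile the file with any C++ compiler and paste Sample Input 1 on stdin; the two lines of Sample Output 1 should come back, and Sample Input 2 should produce the single combined Distance/Time line.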