Run the program:
jpan@jpan-Beijing:~/Software/spark-0.9.1$ ./bin/run-example org.apache.spark.examples.ExceptionHandlingTest spark://jpan-Beijing:7077
The output is:
14/06/04 15:59:10 WARN scheduler.TaskSetManager: Lost TID 0 (task 0.0:0)
14/06/04 15:59:10 WARN scheduler.TaskSetManager: Loss was due to java.lang.Exception
java.lang.Exception: Testing exception handling
at org.apache.spark.examples.ExceptionHandlingTest$$anonfun$main$1.apply$mcVI$sp(ExceptionHandlingTest.scala:33)
at org.apache.spark.examples.ExceptionHandlingTest$$anonfun$main$1.apply(ExceptionHandlingTest.scala:31)
at org.apache.spark.examples.ExceptionHandlingTest$$anonfun$main$1.apply(ExceptionHandlingTest.scala:31)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:24)
at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:594)
at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:594)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:884)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:884)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:109)
at org.apache.spark.scheduler.Task.run(Task.scala:53)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$run$1.apply$mcV$sp(Executor.scala:211)
at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:42)
at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:41)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:41)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:176)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
14/06/04 15:59:10 INFO scheduler.TaskSetManager: Starting task 0.0:0 as TID 2 on executor 0: jpan-Beijing.local (PROCESS_LOCAL)
14/06/04 15:59:10 INFO scheduler.TaskSetManager: Serialized task 0.0:0 as 1195 bytes in 0 ms
14/06/04 15:59:10 INFO scheduler.TaskSetManager: Finished TID 1 in 4459 ms on jpan-Beijing.local (progress: 1/2)
14/06/04 15:59:10 INFO scheduler.TaskSetManager: Finished TID 2 in 60 ms on jpan-Beijing.local (progress: 2/2)
14/06/04 15:59:10 INFO scheduler.DAGScheduler: Completed ResultTask(0, 1)
14/06/04 15:59:10 INFO scheduler.DAGScheduler: Completed ResultTask(0, 0)
14/06/04 15:59:10 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
14/06/04 15:59:10 INFO scheduler.DAGScheduler: Stage 0 (foreach at ExceptionHandlingTest.scala:31) finished in 10.428 s
14/06/04 15:59:10 INFO spark.SparkContext: Job finished: foreach at ExceptionHandlingTest.scala:31, took 10.614573823 s
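The log shows Spark's task-level fault tolerance at work: TID 0 is lost when the task throws java.lang.Exception, the scheduler resubmits it as TID 2 on the same executor, the retry succeeds, and the job still finishes. A task exception only kills the job if the task keeps failing past the retry limit; in that case the failure surfaces on the driver as a SparkException thrown by the action call. Below is a minimal sketch of catching that on the driver side. It is hypothetical code, not part of the quoted example, and it assumes the 0.9-era SparkContext(master, appName) constructor:

import org.apache.spark.{SparkContext, SparkException}

object DriverSideHandling {
  def main(args: Array[String]) {
    val sc = new SparkContext(args(0), "DriverSideHandling")
    try {
      // Every attempt fails, so the task exhausts its retries, the
      // scheduler aborts the job, and foreach() throws on the driver.
      sc.parallelize(1 to 4).foreach { _ =>
        throw new Exception("always fails")
      }
    } catch {
      case e: SparkException =>
        System.err.println("Job aborted: " + e.getMessage)
    } finally {
      sc.stop()
    }
  }
}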
The source code is:
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples

import org.apache.spark.SparkContext

object ExceptionHandlingTest {
  def main(args: Array[String]) {
    if (args.length == 0) {
      System.err.println("Usage: ExceptionHandlingTest <master>")
      System.exit(1)
    }

    val sc = new SparkContext(args(0), "ExceptionHandlingTest",
      System.getenv("SPARK_HOME"), SparkContext.jarOfClass(this.getClass))
    sc.parallelize(0 until sc.defaultParallelism).foreach { i =>
      if (math.random > 0.75) {
        throw new Exception("Testing exception handling")
      }
    }

    sc.stop()
  }
}
The source is straightforward: it parallelizes the integers 0 until sc.defaultParallelism, one element per task, and each task throws an exception with probability 0.25 (math.random > 0.75). Spark treats the thrown exception as a task failure and resubmits the task, which is exactly what the "Lost TID 0" followed by "Starting task 0.0:0 as TID 2" lines in the log show.
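The retry budget that lets this job recover is configurable. In Spark of this vintage the relevant setting is spark.task.maxFailures, the number of attempts a single task is allowed before the job is aborted (default 4). A hedged sketch of raising it through SparkConf, which is already available in 0.9.x; the master URL here is just the one from the run above:

import org.apache.spark.{SparkConf, SparkContext}

// Allow each task up to 8 attempts before the job is aborted.
val conf = new SparkConf()
  .setMaster("spark://jpan-Beijing:7077")
  .setAppName("ExceptionHandlingTest")
  .set("spark.task.maxFailures", "8")
val sc = new SparkContext(conf)

With a higher limit the job tolerates more unlucky retries of the same task; with spark.task.maxFailures set to 1, the first thrown exception would abort the job immediately.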