import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;
import java.util.Arrays;
import java.util.List;
public class GroupByKeyDemo {
    public static void main(String[] args) {
        // Run Spark locally with a single worker thread.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("spark");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Sample (key, value) pairs; the keys "ty", "dd", and "tt" each appear twice.
        List<Tuple2<String, Integer>> list = Arrays.asList(
                new Tuple2<>("ty", 1),
                new Tuple2<>("dd", 1),
                new Tuple2<>("tt", 2),
                new Tuple2<>("dd", 2),
                new Tuple2<>("tt", 1),
                new Tuple2<>("ty", 3)
        );

        // Distribute the list as a pair RDD, then group all values under each key.
        // The argument 3 sets the number of partitions in the resulting RDD.
        JavaPairRDD<String, Integer> javaPairRDD = sc.parallelizePairs(list);
        JavaPairRDD<String, Iterable<Integer>> javaPairRDD1 = javaPairRDD.groupByKey(3);

        // collect() brings the grouped results back to the driver:
        // one entry per key with all of that key's values.
        System.err.println(javaPairRDD1.collect());

        sc.close();
    }
}