package day11;
import org.apache.calcite.util.Static;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import akka.dispatch.Filter;
import antlr.collections.List;
/**
 * Demonstrates basic Spark RDD transformations and actions on a local text file:
 * sample, foreach, take, first, filter, collect, and count.
 *
 * <p>Note: {@code java.util.List} is used fully qualified because this file's
 * import block binds {@code List} to {@code antlr.collections.List}, which is
 * not generic and cannot hold the results of {@code take}/{@code collect}.
 */
public class TransfomationsTest {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setAppName("test").setMaster("local");
        // try-with-resources guarantees the SparkContext is stopped even on failure
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<String> lines = sc.textFile("./words");

            // sample(withReplacement=true, fraction=0.1, seed=100): ~10% random sample
            JavaRDD<String> result = lines.sample(true, 0.1, 100);
            result.foreach(new VoidFunction<String>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void call(String arg0) throws Exception {
                    System.out.println(arg0);
                }
            });

            // take(3): driver-side list of the first 3 elements
            java.util.List<String> take = lines.take(3);
            for (String s : take) {
                System.out.println(s);
            }

            // first(): equivalent to take(1).get(0)
            String first = lines.first();
            System.out.println(first);

            // BUG FIX: the original discarded the filtered RDD and then called
            // collect/count/foreach statically on akka.dispatch.Filter, which has
            // no such methods. Keep the filtered RDD and act on it instead.
            JavaRDD<String> filtered = lines.filter(new Function<String, Boolean>() {
                // BUG FIX: was "SerialVersionUID" (wrong case), which is not the
                // serialization field Java looks for.
                private static final long serialVersionUID = 1L;

                @Override
                public Boolean call(String line) throws Exception {
                    return "hello spark".equals(line);
                }
            });

            // collect(): pull all matching lines to the driver
            java.util.List<String> collectionList = filtered.collect();
            // BUG FIX: original iterated over undefined variable "collection"
            for (String s : collectionList) {
                System.out.println(s);
            }

            long count = filtered.count();
            System.out.println(count);

            filtered.foreach(new VoidFunction<String>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void call(String arg0) throws Exception {
                    System.out.println(arg0);
                }
            });
        }
    }
}
