Error on import java.util.list (Spark Java word-count example)
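If the failing line is the lowercase import from the title, the fix is just capitalization: Java imports are case-sensitive, and the class is java.util.List, not java.util.list.

// class names are case-sensitive; java.util.list does not exist
// import java.util.list;   // compile error: cannot find symbol
import java.util.List;

If the org.apache.spark imports fail instead, the Spark jars are most likely missing from the project's build path; for the spark-2.3.2-bin-hadoop2.7 setup used below, the matching artifact would be spark-core_2.11, version 2.3.2 (or the jars/ directory of that distribution).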
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

public class TestTemple {

    private static final Pattern SPACE = Pattern.compile(" ");

    public static void main(String[] args) {
        TestTemple ts = new TestTemple();
        ts.test();
    }

    public void test() {
        SparkConf sconf = new SparkConf().setAppName("test").setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(sconf);

        // Read the input file as an RDD of lines.
        JavaRDD<String> l1 = sc.textFile("file:///home/zqc/spark-2.3.2-bin-hadoop2.7/NOTICE");

        // Split each line on single spaces; in the Spark 2.x Java API,
        // flatMap must return an Iterator rather than an Iterable.
        JavaRDD<String> l3 = l1.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator());

        // Pair each word with a count of 1, then sum the counts per word.
        JavaPairRDD<String, Integer> ones = l3.mapToPair(s -> new Tuple2<>(s, 1));
        JavaPairRDD<String, Integer> cou = ones.reduceByKey((i1, i2) -> i1 + i2);

        // Bring the results back to the driver and print them.
        List<Tuple2<String, Integer>> re = cou.collect();
        for (Tuple2<String, Integer> s : re) {
            System.out.println(s._1() + ":" + s._2());
        }

        sc.stop();
    }
}
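As an aside, the mapToPair / reduceByKey / collect chain can be collapsed with countByValue(), which counts each distinct element and returns the result directly to the driver as a Map. A minimal sketch against the same l3 word RDD (Spark 2.3.2 Java API; variable names reused from the code above, and it additionally needs import java.util.Map):

        // countByValue() is an action: it aggregates on the cluster and
        // returns a Map<word, count> to the driver in one step.
        Map<String, Long> counts = l3.countByValue();
        counts.forEach((word, n) -> System.out.println(word + ":" + n));

Like collect(), this is only appropriate when the distinct words fit in driver memory; for large vocabularies, keep the reduceByKey version and write the pair RDD out with saveAsTextFile instead.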