Spark 二次排序

package com.ali.scala.secondSort
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Composite key for secondary sorting: orders by the primary `key` first,
 * breaking ties with `value`.
 *
 * @param key   primary sort field
 * @param value secondary sort field (tie-breaker)
 */
case class Key(key: Int, value: Int) extends Ordered[Key] {
  /**
   * Compares primary keys first and falls back to values on a tie.
   *
   * Uses `Integer.compare` instead of plain subtraction: `this.key - that.key`
   * overflows for operands near `Int.MinValue`/`Int.MaxValue`, yielding a
   * wrong sign and therefore a wrong sort order. (The original source also
   * contained en-dash characters `–` instead of `-`, which did not compile.)
   */
  def compare(that: Key): Int = {
    val primary = Integer.compare(this.key, that.key)
    if (primary != 0) primary
    else Integer.compare(this.value, that.value)
  }
}

/**
 * Secondary-sort driver: reads "<int> <int>" lines, sorts them descending by
 * the composite (key, value) pair, and prints the original lines.
 *
 * Fixes over the original: curly quotes (“ ”) replaced with straight quotes
 * so the file compiles, each line is split only once instead of twice, and
 * the SparkContext is stopped on completion.
 */
object SecondSort {
  def main(args: Array[String]): Unit = {
    // Local master for demonstration; swap for a cluster URL in production.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("hello")
    val sc = new SparkContext(conf)

    // Each input line is expected to look like: "<int> <int>".
    val fileRdd = sc.textFile("spark-wordcount/data/secondSort.txt")

    // Split each line once and pair the composite key with the original line.
    val sortRdd = fileRdd.map { line =>
      val fields = line.split(" ")
      (Key(fields(0).toInt, fields(1).toInt), line)
    }

    // Descending sort by the composite key, then emit the original lines.
    // NOTE(review): foreach(println) prints on the executors, not necessarily
    // the driver — fine for local mode, misleading on a real cluster.
    sortRdd.sortByKey(ascending = false).map(_._2).foreach(println)

    sc.stop() // release Spark resources
  }
}

    原文作者:风中追风zz
    原文地址: https://zhuanlan.zhihu.com/p/79917401
    本文转自网络文章,转载此文章仅为分享知识,如有侵权,请联系博主进行删除。
点赞