// Reviewer note (translated from Chinese; original was bare prose, which is not
// valid Scala): "This is what I did. It slices up a file. A HashMap is a good
// choice for high performance and will outperform any kind of sort. You can also
// look into the more concise sort and slice functions available."
import java.io.FileNotFoundException
/**
 * Cohesive static method object for file handling.
 */
object WordCountFileHandler {

  import java.io.FileNotFoundException
  import scala.util.control.NonFatal

  /** Encoding used when reading files. */
  val FILE_FORMAT = "utf-8"

  /**
   * Take input from file. Split on spaces.
   *
   * @param fileLocationAndName string location of file
   * @return Some(word iterator), or None if the file is missing, unreadable, or empty
   */
  def apply(fileLocationAndName: String): Option[Iterator[String]] =
    apply(fileLocationAndName, " ")

  /**
   * Split on separator parameter.
   *
   * @param fileLocationAndName string location of file
   * @param wordSeperator split on this string (note: String.split treats it as a regex)
   * @return Some(word iterator), or None if the file is missing, unreadable, or empty
   */
  def apply(fileLocationAndName: String, wordSeperator: String): Option[Iterator[String]] = {
    try {
      val source = scala.io.Source.fromFile(fileLocationAndName, FILE_FORMAT)
      try {
        // Join all lines, strip anything that is not a letter or whitespace,
        // split, and drop empty tokens. Filtering empty tokens replaces the
        // original's replaceAll(" ", ""), which deleted EVERY space and made
        // the subsequent split(" ") return the whole file as one token; it
        // also handles the double spaces found in files like README.md.
        val words = source.getLines()
          .mkString(wordSeperator) // mkString is safe on an empty file; reduceLeft threw
          .replaceAll("[^a-zA-Z\\s]", "")
          .split(wordSeperator)
          .filter(_.nonEmpty)
          .toList
        if (words.isEmpty) None else Some(words.iterator)
      } finally {
        source.close() // the original leaked the underlying file handle
      }
    } catch {
      case _: FileNotFoundException =>
        println("file not found: " + fileLocationAndName)
        None
      case NonFatal(e) =>
        // NonFatal instead of Exception so truly fatal errors still propagate.
        // e (toString) instead of e.getStackTrace, which printed an array reference.
        println("Unknown exception occurred during file handling: \n\n" + e)
        None
    }
  }
}
import collection.mutable
/**
 * Static method object.
 * Takes a processed word-count map and spits out the needed info.
 * While a small performance hit is made in not doing this during the word list analysis,
 * this does demonstrate cohesion and open/closed much better.
 * author: jason goodwin
 */
object WordMapAnalyzer {

  /**
   * Number of distinct words in the processed map.
   * @param input word -> count map
   * @return distinct word count
   */
  def getNumberOfWords(input: mutable.Map[String, Int]): Int = input.size

  /**
   * Declarative top-c words: full descending sort, then take the head.
   * Roughly O(n log n) given merge-sort performance.
   * See getTopCWordsImperative for a more performant method.
   * (Parameter widened from mutable.HashMap to mutable.Map for consistency
   * with the sibling methods; source-compatible for existing callers.)
   * @param input word -> count map
   * @param c how many of the highest-count entries to return
   * @return immutable map of the c highest-count entries
   */
  def getTopCWordsDeclarative(input: mutable.Map[String, Int], c: Int): Map[String, Int] =
    input.toList.sortWith(_._2 > _._2).take(c).toMap

  /**
   * Imperative single pass for much better performance relative to the above.
   * Growth can be reasoned as roughly linear on random input,
   * upper bounded around O(n * c) in the worst case (sorted low-to-high input).
   * @param input word -> count map
   * @param c how many of the highest-count entries to return
   * @return mutable map of the c highest-count entries (empty when c <= 0)
   */
  def getTopCWordsImperative(input: mutable.Map[String, Int], c: Int): mutable.Map[String, Int] = {
    val topWords = mutable.HashMap[String, Int]()
    if (c > 0) { // guard: the original produced a spurious entry when c <= 0
      var bottomElement: (String, Int) = ("", 0)
      for ((word, count) <- input) {
        if (topWords.size < c) {
          // still filling the window of c candidates
          topWords += (word -> count)
          bottomElement = topWords.minBy(_._2) // minBy works on the map directly; no toList needed
        } else if (count >= bottomElement._2) {
          // evict the current minimum in favour of this entry
          topWords -= bottomElement._1
          topWords += (word -> count)
          bottomElement = topWords.minBy(_._2)
        }
      }
    }
    topWords // mutable.HashMap already IS a mutable.Map; the asInstanceOf cast was redundant
  }
}
object WordMapCountCalculator {
/**
 * Take a word iterator and return a map keyed by words with a count as the value.
 *
 * @param wordList Iterator[String] of words to be analysed
 * @return mutable.Map[String, Int] with word as key and occurrence count as value
 */
def apply(wordList: Iterator[String]): mutable.Map[String, Int] = {
  // Fold the words into a mutable map; += returns the map itself, so the
  // accumulator threads through the fold naturally. The original named the
  // accumulator "word" and the element "count" (swapped — actively misleading)
  // and ended with a redundant asInstanceOf cast; HashMap already conforms to
  // the declared mutable.Map return type.
  wordList.foldLeft(mutable.HashMap.empty[String, Int]) { (counts, word) =>
    counts += (word -> (counts.getOrElse(word, 0) + 1))
  }
}