SPARK-1316. Remove use of Commons IO

Commons IO is actually barely used, and is not a declared dependency. This just replaces with equivalents from the JDK and Guava.

Author: Sean Owen <sowen@cloudera.com>

Closes #1173 from srowen/SPARK-1316 and squashes the following commits:

2eb53db [Sean Owen] Reorder Guava import
8fde404 [Sean Owen] Remove use of Commons IO, which is not actually a dependency
This commit is contained in:
Sean Owen 2014-06-22 11:47:49 -07:00 committed by Reynold Xin
parent 476581e8c8
commit 9fe28c35df
2 changed files with 13 additions and 10 deletions

View file

@@ -19,7 +19,7 @@ package org.apache.spark.util.logging
import java.io.{File, FileFilter, InputStream} import java.io.{File, FileFilter, InputStream}
import org.apache.commons.io.FileUtils import com.google.common.io.Files
import org.apache.spark.SparkConf import org.apache.spark.SparkConf
import RollingFileAppender._ import RollingFileAppender._
@@ -83,7 +83,7 @@ private[spark] class RollingFileAppender(
logDebug(s"Attempting to rollover file $activeFile to file $rolloverFile") logDebug(s"Attempting to rollover file $activeFile to file $rolloverFile")
if (activeFile.exists) { if (activeFile.exists) {
if (!rolloverFile.exists) { if (!rolloverFile.exists) {
FileUtils.moveFile(activeFile, rolloverFile) Files.move(activeFile, rolloverFile)
logInfo(s"Rolled over $activeFile to $rolloverFile") logInfo(s"Rolled over $activeFile to $rolloverFile")
} else { } else {
// In case the rollover file name clashes, make a unique file name. // In case the rollover file name clashes, make a unique file name.
@@ -100,7 +100,7 @@ private[spark] class RollingFileAppender(
logWarning(s"Rollover file $rolloverFile already exists, " + logWarning(s"Rollover file $rolloverFile already exists, " +
s"rolled over $activeFile to file $altRolloverFile") s"rolled over $activeFile to file $altRolloverFile")
FileUtils.moveFile(activeFile, altRolloverFile) Files.move(activeFile, altRolloverFile)
} }
} else { } else {
logWarning(s"File $activeFile does not exist") logWarning(s"File $activeFile does not exist")

View file

@@ -18,13 +18,16 @@
package org.apache.spark.util package org.apache.spark.util
import java.io._ import java.io._
import java.nio.charset.Charset
import scala.collection.mutable.HashSet import scala.collection.mutable.HashSet
import scala.reflect._ import scala.reflect._
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.spark.{Logging, SparkConf}
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.{BeforeAndAfter, FunSuite}
import com.google.common.io.Files
import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.util.logging.{RollingFileAppender, SizeBasedRollingPolicy, TimeBasedRollingPolicy, FileAppender} import org.apache.spark.util.logging.{RollingFileAppender, SizeBasedRollingPolicy, TimeBasedRollingPolicy, FileAppender}
class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging { class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging {
@@ -41,11 +44,11 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging {
test("basic file appender") { test("basic file appender") {
val testString = (1 to 1000).mkString(", ") val testString = (1 to 1000).mkString(", ")
val inputStream = IOUtils.toInputStream(testString) val inputStream = new ByteArrayInputStream(testString.getBytes(Charset.forName("UTF-8")))
val appender = new FileAppender(inputStream, testFile) val appender = new FileAppender(inputStream, testFile)
inputStream.close() inputStream.close()
appender.awaitTermination() appender.awaitTermination()
assert(FileUtils.readFileToString(testFile) === testString) assert(Files.toString(testFile, Charset.forName("UTF-8")) === testString)
} }
test("rolling file appender - time-based rolling") { test("rolling file appender - time-based rolling") {
@@ -93,7 +96,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging {
val allGeneratedFiles = new HashSet[String]() val allGeneratedFiles = new HashSet[String]()
val items = (1 to 10).map { _.toString * 10000 } val items = (1 to 10).map { _.toString * 10000 }
for (i <- 0 until items.size) { for (i <- 0 until items.size) {
testOutputStream.write(items(i).getBytes("UTF8")) testOutputStream.write(items(i).getBytes(Charset.forName("UTF-8")))
testOutputStream.flush() testOutputStream.flush()
allGeneratedFiles ++= RollingFileAppender.getSortedRolledOverFiles( allGeneratedFiles ++= RollingFileAppender.getSortedRolledOverFiles(
testFile.getParentFile.toString, testFile.getName).map(_.toString) testFile.getParentFile.toString, testFile.getName).map(_.toString)
@@ -197,7 +200,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging {
// send data to appender through the input stream, and wait for the data to be written // send data to appender through the input stream, and wait for the data to be written
val expectedText = textToAppend.mkString("") val expectedText = textToAppend.mkString("")
for (i <- 0 until textToAppend.size) { for (i <- 0 until textToAppend.size) {
outputStream.write(textToAppend(i).getBytes("UTF8")) outputStream.write(textToAppend(i).getBytes(Charset.forName("UTF-8")))
outputStream.flush() outputStream.flush()
Thread.sleep(sleepTimeBetweenTexts) Thread.sleep(sleepTimeBetweenTexts)
} }
@@ -212,7 +215,7 @@ class FileAppenderSuite extends FunSuite with BeforeAndAfter with Logging {
logInfo("Filtered files: \n" + generatedFiles.mkString("\n")) logInfo("Filtered files: \n" + generatedFiles.mkString("\n"))
assert(generatedFiles.size > 1) assert(generatedFiles.size > 1)
val allText = generatedFiles.map { file => val allText = generatedFiles.map { file =>
FileUtils.readFileToString(file) Files.toString(file, Charset.forName("UTF-8"))
}.mkString("") }.mkString("")
assert(allText === expectedText) assert(allText === expectedText)
generatedFiles generatedFiles