[SPARK-14756][CORE] Use parseLong instead of valueOf

## What changes were proposed in this pull request?

Use `Long.parseLong`, which returns a primitive `long` rather than a boxed `Long`.
Using a series of `append()` calls instead of string concatenation avoids the creation of an extra `StringBuilder`.
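
A minimal sketch of the first point (the variable names here are illustrative, not from the patch): `Long.valueOf` returns a boxed `java.lang.Long`, which most call sites immediately auto-unbox, while `Long.parseLong` returns a primitive `long` directly.

```java
public class ParseVsValueOf {
  public static void main(String[] args) {
    String s = "12345";

    // Long.valueOf returns a boxed java.lang.Long; assigning it to a
    // primitive long adds an auto-unboxing step (and possibly a wrapper
    // allocation when the value falls outside the Long cache).
    long viaValueOf = Long.valueOf(s);

    // Long.parseLong returns a primitive long directly, so no wrapper
    // object is created at all.
    long viaParseLong = Long.parseLong(s);

    System.out.println(viaValueOf == viaParseLong); // true
  }
}
```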

## How was this patch tested?

Unit tests

Author: Azeem Jiva <azeemj@gmail.com>

Closes #12520 from javawithjiva/minor.
Azeem Jiva authored 2016-04-26 11:49:04 +01:00, committed by Sean Owen
parent f70e4fff0e, commit de6e633420
5 changed files with 13 additions and 13 deletions


@@ -62,7 +62,7 @@ public final class CalendarInterval implements Serializable {
     if (s == null) {
       return 0;
     } else {
-      return Long.valueOf(s);
+      return Long.parseLong(s);
     }
   }
@@ -91,7 +91,7 @@ public final class CalendarInterval implements Serializable {
       String s, long minValue, long maxValue) throws IllegalArgumentException {
     long result = 0;
     if (s != null) {
-      result = Long.valueOf(s);
+      result = Long.parseLong(s);
       if (result < minValue || result > maxValue) {
         throw new IllegalArgumentException(String.format("%s %d outside range [%d, %d]",
           fieldName, result, minValue, maxValue));
@@ -218,7 +218,7 @@ public final class CalendarInterval implements Serializable {
         result = new CalendarInterval(0, millisecond * MICROS_PER_MILLI);
       } else if (unit.equals("microsecond")) {
-        long micros = Long.valueOf(m.group(1));
+        long micros = Long.parseLong(m.group(1));
         result = new CalendarInterval(0, micros);
       }
     } catch (Exception e) {
@@ -318,7 +318,7 @@ public final class CalendarInterval implements Serializable {
   private void appendUnit(StringBuilder sb, long value, String unit) {
     if (value != 0) {
-      sb.append(" " + value + " " + unit + "s");
+      sb.append(' ').append(value).append(' ').append(unit).append('s');
     }
   }
 }
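
The `appendUnit` change removes the hidden temporary `StringBuilder` (plus an intermediate `String`) that javac of that era emitted for the concatenation inside the old `append` call. A standalone sketch of both styles (the method names are made up for illustration):

```java
public class AppendStyles {
  // Old style: the concatenation compiles to a second, temporary
  // StringBuilder plus an intermediate String that is then copied into sb.
  static void appendUnitConcat(StringBuilder sb, long value, String unit) {
    if (value != 0) {
      sb.append(" " + value + " " + unit + "s");
    }
  }

  // New style: each piece is written straight into sb, so no temporary
  // builder or intermediate String is created.
  static void appendUnitChained(StringBuilder sb, long value, String unit) {
    if (value != 0) {
      sb.append(' ').append(value).append(' ').append(unit).append('s');
    }
  }

  public static void main(String[] args) {
    StringBuilder sb = new StringBuilder("interval");
    appendUnitChained(sb, 3, "hour");
    System.out.println(sb); // interval 3 hours
  }
}
```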


@@ -384,7 +384,7 @@ object SparkHadoopUtil {
   def get: SparkHadoopUtil = {
     // Check each time to support changing to/from YARN
-    val yarnMode = java.lang.Boolean.valueOf(
+    val yarnMode = java.lang.Boolean.parseBoolean(
       System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE")))
     if (yarnMode) {
       yarn
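
Here `java.lang.Boolean.valueOf` returned a boxed `Boolean` that the `if` condition then had to unbox; `parseBoolean` yields a primitive, and both treat a `null` argument as `false`, so the check behaves identically when `SPARK_YARN_MODE` is unset. The same change appears in `YarnSparkHadoopUtil` below. A minimal Java sketch of the lookup:

```java
public class YarnModeCheck {
  public static void main(String[] args) {
    // Mirrors the patch's lookup: system property first, falling back to
    // the environment variable of the same name.
    String raw = System.getProperty("SPARK_YARN_MODE",
      System.getenv("SPARK_YARN_MODE"));

    // parseBoolean returns a primitive boolean and, like valueOf, maps a
    // null (unset) value to false.
    boolean yarnMode = Boolean.parseBoolean(raw);

    System.out.println("YARN mode: " + yarnMode);
  }
}
```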


@@ -66,8 +66,8 @@ public class JavaStreamingTestExample {
     }
     String dataDir = args[0];
-    Duration batchDuration = Seconds.apply(Long.valueOf(args[1]));
-    int numBatchesTimeout = Integer.valueOf(args[2]);
+    Duration batchDuration = Seconds.apply(Long.parseLong(args[1]));
+    int numBatchesTimeout = Integer.parseInt(args[2]);

     SparkConf conf = new SparkConf().setMaster("local").setAppName("StreamingTestExample");
     JavaStreamingContext ssc = new JavaStreamingContext(conf, batchDuration);
@@ -80,8 +80,8 @@ public class JavaStreamingTestExample {
       @Override
       public BinarySample call(String line) {
         String[] ts = line.split(",");
-        boolean label = Boolean.valueOf(ts[0]);
-        double value = Double.valueOf(ts[1]);
+        boolean label = Boolean.parseBoolean(ts[0]);
+        double value = Double.parseDouble(ts[1]);
         return new BinarySample(label, value);
       }
     });
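
In the streaming example the second hunk runs once per input record, so the primitive-returning `parseBoolean`/`parseDouble` avoid allocating a wrapper pair per line. A sketch of that record-parsing step (the sample input is made up; the `label,value` format follows the example):

```java
public class BinarySampleParsing {
  public static void main(String[] args) {
    String line = "true,3.14"; // hypothetical "label,value" record

    String[] ts = line.split(",");
    // Both calls return primitives (boolean, double), so no Boolean or
    // Double wrapper objects are allocated for each parsed record.
    boolean label = Boolean.parseBoolean(ts[0]);
    double value = Double.parseDouble(ts[1]);

    System.out.println(label + " -> " + value);
  }
}
```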


@@ -441,9 +441,9 @@ private[sql] object PartitioningUtils {
       val c = path.charAt(i)
       if (c == '%' && i + 2 < path.length) {
         val code: Int = try {
-          Integer.valueOf(path.substring(i + 1, i + 3), 16)
-        } catch { case e: Exception =>
-          -1: Integer
+          Integer.parseInt(path.substring(i + 1, i + 3), 16)
+        } catch {
+          case _: Exception => -1
         }
         if (code >= 0) {
           sb.append(code.asInstanceOf[Char])
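
Besides swapping `Integer.valueOf(…, 16)` for `Integer.parseInt(…, 16)`, this hunk drops the boxed fallback (`-1: Integer`) for a plain primitive `-1`. A self-contained Java sketch of the same percent-decoding idea (the `unescape` helper is hypothetical, not Spark's API):

```java
public class PercentDecode {
  // Decodes "%2F"-style escapes; malformed sequences are copied through.
  static String unescape(String path) {
    StringBuilder sb = new StringBuilder();
    int i = 0;
    while (i < path.length()) {
      char c = path.charAt(i);
      if (c == '%' && i + 2 < path.length()) {
        int code;
        try {
          // Radix-16 parse of the two hex digits after '%'; parseInt
          // returns a primitive int, so no Integer wrapper is boxed.
          code = Integer.parseInt(path.substring(i + 1, i + 3), 16);
        } catch (Exception e) {
          code = -1; // plain primitive fallback, as in the patched Scala
        }
        if (code >= 0) {
          sb.append((char) code);
          i += 3;
          continue;
        }
      }
      sb.append(c);
      i++;
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(unescape("a%20b%2Fc")); // prints: a b/c
  }
}
```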


@@ -345,7 +345,7 @@ object YarnSparkHadoopUtil {
   val RM_REQUEST_PRIORITY = Priority.newInstance(1)

   def get: YarnSparkHadoopUtil = {
-    val yarnMode = java.lang.Boolean.valueOf(
+    val yarnMode = java.lang.Boolean.parseBoolean(
       System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE")))
     if (!yarnMode) {
       throw new SparkException("YarnSparkHadoopUtil is not available in non-YARN mode!")