[SPARK-15053][BUILD] Fix Java Lint errors on Hive-Thriftserver module

## What changes were proposed in this pull request?

This issue fixes or suppresses 181 Java linter errors introduced by SPARK-14987, which copied the Hive service code from Hive. We should clean up these errors before releasing Spark 2.0.

- Fix UnusedImports (15 lines), RedundantModifier (14 lines), SeparatorWrap (9 lines), MethodParamPad (6 lines), FileTabCharacter (5 lines), ArrayTypeStyle (3 lines), ModifierOrder (3 lines), RedundantImport (1 line), CommentsIndentation (1 line), UpperEll (1 line), FallThrough (1 line), OneStatementPerLine (1 line), NewlineAtEndOfFile (1 line) errors (a short style sketch follows this list).
- Ignore `LineLength` errors under `hive/service/*` (118 lines).
- Ignore `MethodName` error in `PasswdAuthenticationProvider.java` (1 line).
- Ignore `NoFinalizer` error in `ThreadWithGarbageCleanup.java` (1 line).
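
For readers who don't have these Checkstyle rules memorized, the hypothetical class below sketches the conventions the fixes converge on (redundant modifiers dropped, `static final` ordering, wrapped call chains starting with the dot, `String[]` array style, upper-case `L` long literals). It is illustrative only and is not part of this patch; the actual changes are in the per-file diffs further down.

```java
/** Illustrative sketch only: conventions enforced by the Checkstyle rules listed above. */
public final class LintStyleExamples {

  // RedundantModifier: interface members are implicitly public, so the modifier is omitted.
  interface Greeter {
    String greet(String name);
  }

  // ModifierOrder: "static final", not "final static".
  // UpperEll: long literals take an upper-case L suffix (3000L, not 3000l).
  private static final long CHECK_INTERVAL_MS = 3000L;

  // ArrayTypeStyle: brackets belong to the type ("String[] parts", not "String parts[]").
  public static String join(String[] parts) {
    StringBuilder sb = new StringBuilder();
    for (String part : parts) {
      // SeparatorWrap: when a call chain wraps, the dot starts the continuation line.
      sb.append(part)
        .append(',');
    }
    return sb.toString();
  }

  private LintStyleExamples() {}
}
```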

## How was this patch tested?

After the Jenkins build passes, run `dev/lint-java` manually.
```bash
$ dev/lint-java
Checkstyle checks passed.
```

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #12831 from dongjoon-hyun/SPARK-15053.
Authored by Dongjoon Hyun on 2016-05-03 12:39:37 +01:00; committed by Sean Owen
parent dfd9723dd3
commit a744457076
30 changed files with 63 additions and 71 deletions

@@ -36,4 +36,10 @@
files="src/test/java/org/apache/spark/sql/hive/test/Complex.java"/>
<suppress checks="LineLength"
files="src/main/java/org/apache/spark/examples/JavaLogQuery.java"/>
<suppress checks="LineLength"
files="src/main/java/org/apache/hive/service/*"/>
<suppress checks="MethodName"
files="src/main/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java"/>
<suppress checks="NoFinalizer"
files="src/main/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java"/>
</suppressions>

@@ -29,7 +29,7 @@ public interface Service {
/**
* Service states
*/
-public enum STATE {
+enum STATE {
/** Constructed but not initialized */
NOTINITED,

@@ -41,4 +41,4 @@ public class ServiceUtils {
}
return endIdx;
}
-}
+}

@@ -56,7 +56,7 @@ public final class HttpAuthUtils {
private static final String COOKIE_CLIENT_USER_NAME = "cu";
private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
private static final String COOKIE_KEY_VALUE_SEPARATOR = "=";
-private final static Set<String> COOKIE_ATTRIBUTES =
+private static final Set<String> COOKIE_ATTRIBUTES =
new HashSet<String>(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_RAND_NUMBER));
/**
@@ -93,10 +93,10 @@ public final class HttpAuthUtils {
*/
public static String createCookieToken(String clientUserName) {
StringBuffer sb = new StringBuffer();
-sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName).
-append(COOKIE_ATTR_SEPARATOR);
-sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR).
-append((new Random(System.currentTimeMillis())).nextLong());
+sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName)
+.append(COOKIE_ATTR_SEPARATOR);
+sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR)
+.append((new Random(System.currentTimeMillis())).nextLong());
return sb.toString();
}

@@ -96,7 +96,7 @@ public final class KerberosSaslHelper {
private final ThriftCLIService service;
private final Server saslServer;
-public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
+CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
super(null);
this.service = service;
this.saslServer = saslServer;

@@ -25,10 +25,12 @@ import java.util.Map;
* Possible values of SASL quality-of-protection value.
*/
public enum SaslQOP {
AUTH("auth"), // Authentication only.
AUTH_INT("auth-int"), // Authentication and integrity checking by using signatures.
AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
// by using signatures and encryption.
// Authentication only.
AUTH("auth"),
// Authentication and integrity checking by using signatures.
AUTH_INT("auth-int"),
// Authentication, integrity and confidentiality checking by using signatures and encryption.
AUTH_CONF("auth-conf");
public final String saslQop;

@@ -481,8 +481,8 @@ public class CLIService extends CompositeService implements ICLIService {
@Override
public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
String owner, String renewer) throws HiveSQLException {
-String delegationToken = sessionManager.getSession(sessionHandle).
-getDelegationToken(authFactory, owner, renewer);
+String delegationToken = sessionManager.getSession(sessionHandle)
+.getDelegationToken(authFactory, owner, renewer);
LOG.info(sessionHandle + ": getDelegationToken()");
return delegationToken;
}
@@ -490,8 +490,7 @@ public class CLIService extends CompositeService implements ICLIService {
@Override
public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
String tokenStr) throws HiveSQLException {
-sessionManager.getSession(sessionHandle).
-cancelDelegationToken(authFactory, tokenStr);
+sessionManager.getSession(sessionHandle).cancelDelegationToken(authFactory, tokenStr);
LOG.info(sessionHandle + ": cancelDelegationToken()");
}

@@ -111,7 +111,7 @@ public class HiveSQLException extends SQLException {
/**
* Converts current object to a {@link TStatus} object
-* @return a {@link TStatus} object
+* @return a {@link TStatus} object
*/
public TStatus toTStatus() {
// TODO: convert sqlState, etc.
@@ -125,8 +125,8 @@ public class HiveSQLException extends SQLException {
/**
* Converts the specified {@link Exception} object into a {@link TStatus} object
-* @param e a {@link Exception} object
-* @return a {@link TStatus} object
+* @param e a {@link Exception} object
+* @return a {@link TStatus} object
*/
public static TStatus toTStatus(Exception e) {
if (e instanceof HiveSQLException) {
@@ -155,7 +155,8 @@ public class HiveSQLException extends SQLException {
if (parent != null) {
int n = parent.length - 1;
while (m >= 0 && n >= 0 && trace[m].equals(parent[n])) {
-m--; n--;
+m--;
+n--;
}
}
List<String> detail = enroll(cause, trace, m);

@@ -85,6 +85,7 @@ public enum OperationState {
if (OperationState.CLOSED.equals(newState)) {
return;
}
+break;
default:
// fall-through
}

@@ -130,8 +130,8 @@ public class RowBasedSet implements RowSet {
}
private static class RemovableList<E> extends ArrayList<E> {
-public RemovableList() { super(); }
-public RemovableList(List<E> rows) { super(rows); }
+RemovableList() { super(); }
+RemovableList(List<E> rows) { super(rows); }
@Override
public void removeRange(int fromIndex, int toIndex) {
super.removeRange(fromIndex, toIndex);

@@ -42,7 +42,7 @@ public class ClassicTableTypeMapping implements TableTypeMapping {
private final Map<String, String> hiveToClientMap = new HashMap<String, String>();
private final Map<String, String> clientToHiveMap = new HashMap<String, String>();
-public ClassicTableTypeMapping () {
+public ClassicTableTypeMapping() {
hiveToClientMap.put(TableType.MANAGED_TABLE.toString(),
ClassicTableTypes.TABLE.toString());
hiveToClientMap.put(TableType.EXTERNAL_TABLE.toString(),

@@ -27,10 +27,8 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Pattern;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

@@ -23,7 +23,6 @@ import java.util.List;
import java.util.Set;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
@@ -103,7 +102,7 @@ public class GetFunctionsOperation extends MetadataOperation {
.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
for (String functionName : functionNames) {
FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(functionName);
-Object rowData[] = new Object[] {
+Object[] rowData = new Object[] {
null, // FUNCTION_CAT
null, // FUNCTION_SCHEM
functionInfo.getDisplayName(), // FUNCTION_NAME

@@ -18,16 +18,8 @@
package org.apache.hive.service.cli.operation;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;

@@ -44,8 +44,8 @@ public class GetTableTypesOperation extends MetadataOperation {
protected GetTableTypesOperation(HiveSession parentSession) {
super(parentSession, OperationType.GET_TABLE_TYPES);
-String tableMappingStr = getParentSession().getHiveConf().
-getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
+String tableMappingStr = getParentSession().getHiveConf()
+.getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
tableTypeMapping =
TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());

@@ -64,8 +64,8 @@ public class GetTablesOperation extends MetadataOperation {
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = tableName;
-String tableMappingStr = getParentSession().getHiveConf().
-getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
+String tableMappingStr = getParentSession().getHiveConf()
+.getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
tableTypeMapping =
TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
if (tableTypes != null) {

@@ -35,7 +35,7 @@ import org.apache.hive.service.cli.session.HiveSession;
*/
public class GetTypeInfoOperation extends MetadataOperation {
-private final static TableSchema RESULT_SET_SCHEMA = new TableSchema()
+private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addPrimitiveColumn("TYPE_NAME", Type.STRING_TYPE,
"Type name")
.addPrimitiveColumn("DATA_TYPE", Type.INT_TYPE,

@@ -60,15 +60,15 @@ public class LogDivertAppender extends WriterAppender {
/* Patterns that are excluded in verbose logging level.
* Filter out messages coming from log processing classes, or we'll run an infinite loop.
*/
-private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|").
-join(new String[] {LOG.getName(), OperationLog.class.getName(),
+private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|")
+.join(new String[] {LOG.getName(), OperationLog.class.getName(),
OperationManager.class.getName()}));
/* Patterns that are included in execution logging level.
* In execution mode, show only select logger messages.
*/
-private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|").
-join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter",
+private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|")
+.join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter",
"org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
"org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
@@ -88,7 +88,7 @@ public class LogDivertAppender extends WriterAppender {
}
}
-public NameFilter(
+NameFilter(
OperationLog.LoggingLevel loggingMode, OperationManager op) {
this.operationManager = op;
this.loggingMode = loggingMode;
@@ -131,7 +131,7 @@ public class LogDivertAppender extends WriterAppender {
/** This is where the log message will go to */
private final CharArrayWriter writer = new CharArrayWriter();
-private void setLayout (boolean isVerbose, Layout lo) {
+private void setLayout(boolean isVerbose, Layout lo) {
if (isVerbose) {
if (lo == null) {
lo = CLIServiceUtils.verboseLayout;

@@ -18,7 +18,6 @@
package org.apache.hive.service.cli.operation;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;

@@ -326,7 +326,7 @@ public class SQLOperation extends ExecuteStatementOperation {
return resultSchema;
}
-private transient final List<Object> convey = new ArrayList<Object>();
+private final transient List<Object> convey = new ArrayList<Object>();
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {

@@ -27,18 +27,18 @@ public interface TableTypeMapping {
* @param clientTypeName
* @return
*/
-public String mapToHiveType (String clientTypeName);
+String mapToHiveType(String clientTypeName);
/**
* Map hive's table type name to client's table type
* @param clientTypeName
* @return
*/
-public String mapToClientType (String hiveTypeName);
+String mapToClientType(String hiveTypeName);
/**
* Get all the table types of this mapping
* @return
*/
-public Set<String> getTableTypeNames();
+Set<String> getTableTypeNames();
}

@@ -18,8 +18,6 @@
package org.apache.hive.service.cli.session;
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.SessionHandle;
@@ -27,7 +25,6 @@ import org.apache.hive.service.cli.operation.OperationManager;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
import java.io.File;
import java.util.Map;
/**
* Methods that don't need to be executed under a doAs

@@ -33,5 +33,5 @@ public interface HiveSessionHook extends Hook {
* @param sessionHookContext context
* @throws HiveSQLException
*/
-public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException;
+void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException;
}

@@ -30,17 +30,17 @@ public interface HiveSessionHookContext {
* Retrieve session conf
* @return
*/
-public HiveConf getSessionConf();
+HiveConf getSessionConf();
/**
* The get the username starting the session
* @return
*/
-public String getSessionUser();
+String getSessionUser();
/**
* Retrieve handle for the session
* @return
*/
-public String getSessionHandle();
+String getSessionHandle();
}

@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.auth.HiveAuthFactory;
@@ -83,7 +82,7 @@ public class HiveSessionImplwithUGI extends HiveSessionImpl {
return this.sessionUgi;
}
-public String getDelegationToken () {
+public String getDelegationToken() {
return this.delegationTokenStr;
}

@@ -57,7 +57,7 @@ public class HiveSessionProxy implements InvocationHandler {
return invoke(method, args);
}
return ugi.doAs(
-new PrivilegedExceptionAction<Object> () {
+new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws HiveSQLException {
return invoke(method, args);

@@ -151,7 +151,7 @@ public class SessionManager extends CompositeService {
}
private void startTimeoutChecker() {
-final long interval = Math.max(checkInterval, 3000l); // minimum 3 seconds
+final long interval = Math.max(checkInterval, 3000L); // minimum 3 seconds
Runnable timeoutChecker = new Runnable() {
@Override
public void run() {

@@ -722,8 +722,8 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
}
// If there's no authentication, then directly substitute the user
-if (HiveAuthFactory.AuthTypes.NONE.toString().
-equalsIgnoreCase(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) {
+if (HiveAuthFactory.AuthTypes.NONE.toString()
+.equalsIgnoreCase(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) {
return proxyUser;
}

@@ -21,7 +21,6 @@ package org.apache.hive.service.cli.thrift;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Map;
import java.util.Random;
import java.util.Set;
@@ -241,9 +240,9 @@
* Each cookie is of the format [key]=[value]
*/
private String toCookieStr(Cookie[] cookies) {
String cookieStr = "";
String cookieStr = "";
for (Cookie c : cookies) {
for (Cookie c : cookies) {
cookieStr += c.getName() + "=" + c.getValue() + " ;\n";
}
return cookieStr;
@@ -458,7 +457,7 @@
private String getUsername(HttpServletRequest request, String authType)
throws HttpAuthenticationException {
-String creds[] = getAuthHeaderTokens(request, authType);
+String[] creds = getAuthHeaderTokens(request, authType);
// Username must be present
if (creds[0] == null || creds[0].isEmpty()) {
throw new HttpAuthenticationException("Authorization header received " +
@@ -469,7 +468,7 @@
private String getPassword(HttpServletRequest request, String authType)
throws HttpAuthenticationException {
-String creds[] = getAuthHeaderTokens(request, authType);
+String[] creds = getAuthHeaderTokens(request, authType);
// Password must be present
if (creds[1] == null || creds[1].isEmpty()) {
throw new HttpAuthenticationException("Authorization header received " +

@@ -236,8 +236,8 @@ public class HiveServer2 extends CompositeService {
/**
* The executor interface for running the appropriate HiveServer2 command based on parsed options
*/
-static interface ServerOptionsExecutor {
-public void execute();
+interface ServerOptionsExecutor {
+void execute();
}
/**