[SPARK-28063][SQL] Replace deprecated .newInstance() in DSv2 Catalogs

## What changes were proposed in this pull request?

This PR aims to replace the deprecated `.newInstance()` in DSv2 `Catalogs` and to distinguish the plugin class errors more clearly. According to the JDK11 build log, there are no other `.newInstance()` usages left.
- https://amplab.cs.berkeley.edu/jenkins/view/Spark%20QA%20Test%20(Dashboard)/job/spark-master-test-maven-hadoop-2.7-jdk-11-ubuntu-testing/978/consoleFull

SPARK-25984 removed all usages of the deprecated `.newInstance()` on Nov 10, 2018, but this one was added by SPARK-24252 on March 8, 2019.
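
For readers unfamiliar with the reflective APIs involved: `Class.newInstance()` has been deprecated since Java 9 because it propagates checked exceptions thrown by the constructor without any compile-time checking. The replacement used here looks up the public no-arg constructor explicitly, which also lets each failure mode be reported separately; this is why the diff below adds catch blocks for `NoSuchMethodException` and `InvocationTargetException`, which `Class.newInstance()` never threw. A minimal sketch of the pattern (class and method names here are illustrative, not the actual Spark code):

```java
import java.lang.reflect.InvocationTargetException;

public final class PluginLoadSketch {
  // Hypothetical helper mirroring the flow of the patched Catalogs.load():
  // load a class by name, then instantiate it through its public no-arg constructor.
  static Object loadPlugin(String pluginClassName, ClassLoader loader) throws Exception {
    Class<?> pluginClass = loader.loadClass(pluginClassName);
    try {
      // Replaces the deprecated pluginClass.newInstance().
      return pluginClass.getDeclaredConstructor().newInstance();
    } catch (NoSuchMethodException e) {
      // The class has no no-arg constructor at all.
      throw new IllegalStateException("No public no-arg constructor: " + pluginClassName, e);
    } catch (InstantiationException e) {
      // The class is abstract or an interface and cannot be instantiated.
      throw new IllegalStateException("Cannot instantiate: " + pluginClassName, e);
    } catch (IllegalAccessException e) {
      // A no-arg constructor exists but is not accessible (e.g. private).
      throw new IllegalStateException("Constructor not accessible: " + pluginClassName, e);
    } catch (InvocationTargetException e) {
      // The constructor itself threw; the real error is the cause, not e.
      throw new IllegalStateException("Constructor failed: " + pluginClassName, e.getCause());
    }
  }
}
```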

## How was this patch tested?

Pass the Jenkins with the updated test case.

Closes #24882 from dongjoon-hyun/SPARK-28063.

Authored-by: Dongjoon Hyun <dhyun@apple.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
Dongjoon Hyun 2019-06-16 19:58:02 -07:00
parent 5ae1a6bf0d
commit d6a479b1f8
2 changed files with 18 additions and 4 deletions

Catalogs.java

@@ -23,6 +23,7 @@ import org.apache.spark.sql.internal.SQLConf;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 import org.apache.spark.util.Utils;
+import java.lang.reflect.InvocationTargetException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -66,7 +67,8 @@ public class Catalogs {
         name, pluginClassName));
     }
-    CatalogPlugin plugin = CatalogPlugin.class.cast(pluginClass.newInstance());
+    CatalogPlugin plugin =
+      CatalogPlugin.class.cast(pluginClass.getDeclaredConstructor().newInstance());
     plugin.initialize(name, catalogOptions(name, conf));
@@ -76,6 +78,11 @@
       throw new SparkException(String.format(
         "Cannot find catalog plugin class for catalog '%s': %s", name, pluginClassName));
+    } catch (NoSuchMethodException e) {
+      throw new SparkException(String.format(
+        "Failed to find public no-arg constructor for catalog '%s': %s", name, pluginClassName),
+        e);
     } catch (IllegalAccessException e) {
       throw new SparkException(String.format(
         "Failed to call public no-arg constructor for catalog '%s': %s", name, pluginClassName),
@@ -83,7 +90,12 @@
     } catch (InstantiationException e) {
       throw new SparkException(String.format(
-        "Failed while instantiating plugin for catalog '%s': %s", name, pluginClassName),
+        "Cannot instantiate abstract catalog plugin class for catalog '%s': %s", name,
+        pluginClassName), e.getCause());
+    } catch (InvocationTargetException e) {
+      throw new SparkException(String.format(
+        "Failed during instantiating constructor for catalog '%s': %s", name, pluginClassName),
         e.getCause());
     }
   }
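
A note on the `e.getCause()` calls above: `Constructor.newInstance()` wraps any exception thrown by the constructor body in an `InvocationTargetException`, whereas the old `Class.newInstance()` propagated it directly. A tiny self-contained demo of that wrapping (`FailingPlugin` is hypothetical, standing in for the test suite's `ConstructorFailureCatalogPlugin`):

```java
import java.lang.reflect.InvocationTargetException;

public class WrappingDemo {
  // Hypothetical plugin whose constructor always fails, in the spirit of the
  // ConstructorFailureCatalogPlugin used by CatalogLoadingSuite.
  public static class FailingPlugin {
    public FailingPlugin() {
      throw new RuntimeException("Expected failure");
    }
  }

  public static void main(String[] args) throws Exception {
    try {
      FailingPlugin.class.getDeclaredConstructor().newInstance();
    } catch (InvocationTargetException e) {
      // The constructor's exception is not rethrown directly; it is carried as the cause.
      System.out.println(e.getCause().getMessage());  // prints: Expected failure
    }
  }
}
```

This is also why the updated test below asserts on `exc.getCause().getMessage()` rather than `exc.getMessage()` for the "Expected failure" text.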

CatalogLoadingSuite.java

@@ -113,10 +113,12 @@ public class CatalogLoadingSuite {
     String invalidClassName = ConstructorFailureCatalogPlugin.class.getCanonicalName();
     conf.setConfString("spark.sql.catalog.invalid", invalidClassName);
-    RuntimeException exc = intercept(RuntimeException.class, () -> Catalogs.load("invalid", conf));
+    SparkException exc = intercept(SparkException.class, () -> Catalogs.load("invalid", conf));
+    Assert.assertTrue("Should identify the constructor error",
+      exc.getMessage().contains("Failed during instantiating constructor for catalog"));
     Assert.assertTrue("Should have expected error message",
-      exc.getMessage().contains("Expected failure"));
+      exc.getCause().getMessage().contains("Expected failure"));
   }
   @Test
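
Taken together, the new error messages spell out the contract a catalog plugin class must satisfy: concrete, public, with a public no-arg constructor, registered under a `spark.sql.catalog.<name>` key (as the test does with `spark.sql.catalog.invalid`). A hypothetical plugin meeting that contract is sketched below; the `CatalogPlugin` package location and the exact interface methods are assumptions about this point in the codebase, not something this commit defines.

```java
import org.apache.spark.sql.catalog.v2.CatalogPlugin;      // assumed package as of this commit
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

// Hypothetically registered via: spark.sql.catalog.demo=com.example.DemoCatalog
public class DemoCatalog implements CatalogPlugin {
  private String name;

  // Public no-arg constructor: required, otherwise Catalogs.load() now fails with
  // "Failed to find public no-arg constructor for catalog ...".
  public DemoCatalog() {}

  @Override
  public void initialize(String name, CaseInsensitiveStringMap options) {
    // Called by Catalogs.load() with the catalog name and its configuration options.
    this.name = name;
  }

  @Override
  public String name() {
    return name;
  }
}
```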