about summary refs log tree commit diff
path: root/contrib/storage-hive
diff options
context:
space:
mode:
authorVolodymyr Vysotskyi <vvovyk@gmail.com>2018-12-26 20:31:58 +0200
committerGautam Parai <gparai@apache.org>2019-01-03 16:35:30 -0800
commite7558b7909e855d36d5664b93e9b565f8cedca19 (patch)
tree3cbc3218d3d7fd0bcec528d3305df07ceaaec537 /contrib/storage-hive
parent7108f162cd4f18121aa9a8ace76326bd5fbf8264 (diff)
DRILL-6929: Exclude maprfs jar for default profile
closes #1586
Diffstat (limited to 'contrib/storage-hive')
-rw-r--r--contrib/storage-hive/core/pom.xml33
-rw-r--r--contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java (renamed from contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java)24
-rw-r--r--contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java34
3 files changed, 59 insertions, 32 deletions
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index edb6a5051..0203efbf9 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -142,11 +142,6 @@
</exclusion>
</exclusions>
</dependency>
- <dependency>
- <groupId>org.apache.drill.contrib</groupId>
- <artifactId>drill-format-mapr</artifactId>
- <version>${project.version}</version>
- </dependency>
</dependencies>
<build>
@@ -175,15 +170,41 @@
<profiles>
<profile>
<id>mapr</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-mapr-sources</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>scrMapr/main/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
<dependencies>
<dependency>
+ <groupId>org.apache.drill.contrib</groupId>
+ <artifactId>drill-format-mapr</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
<groupId>com.tdunning</groupId>
<artifactId>json</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-maprdb-json-handler</artifactId>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.mapr.db</groupId>
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
index 4994a7277..b8c267589 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
+++ b/contrib/storage-hive/core/scrMapr/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.java
@@ -21,27 +21,26 @@ import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
-import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.planner.logical.DrillScanRel;
import org.apache.drill.exec.planner.logical.RelOptHelper;
import org.apache.drill.exec.store.StoragePluginOptimizerRule;
import org.apache.drill.exec.store.hive.HiveMetadataProvider;
import org.apache.drill.exec.store.hive.HiveReadEntry;
import org.apache.drill.exec.store.hive.HiveScan;
+import org.apache.drill.exec.store.hive.HiveUtilities;
import org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin;
import org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig;
import org.apache.drill.exec.store.mapr.db.json.JsonScanSpec;
import org.apache.drill.exec.store.mapr.db.json.JsonTableGroupScan;
+import org.apache.hadoop.hive.maprdb.json.input.HiveMapRDBJsonInputFormat;
import org.ojai.DocumentConstants;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import static org.apache.drill.exec.store.hive.HiveUtilities.nativeReadersRuleMatches;
-
/**
* Convert Hive scan to use Drill's native MapR-DB reader instead of Hive's MapR-DB JSON Handler.
*/
@@ -69,15 +68,7 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
*/
@Override
public boolean matches(RelOptRuleCall call) {
- try {
- return nativeReadersRuleMatches(call,
- Class.forName("org.apache.hadoop.hive.maprdb.json.input.HiveMapRDBJsonInputFormat"));
- } catch (ClassNotFoundException e) {
- throw UserException.resourceError(e)
- .message("Current Drill build is not designed for working with Hive MapR-DB tables. " +
- "Please disable \"%s\" option", ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER)
- .build(logger);
- }
+ return HiveUtilities.nativeReadersRuleMatches(call, HiveMapRDBJsonInputFormat.class);
}
@Override
@@ -110,15 +101,16 @@ public class ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan extends StoragePlugi
To ensure Drill MapR-DB Json scan will be chosen, reduce Hive scan importance to 0.
*/
call.getPlanner().setImportance(hiveScanRel, 0.0);
- } catch (final Exception e) {
- logger.warn("Failed to convert HiveScan to JsonScanSpec", e);
+ } catch (DrillRuntimeException e) {
+ // TODO: Improve error handling after allowing to throw IOException from StoragePlugin.getFormatPlugin()
+ logger.warn("Failed to convert HiveScan to JsonScanSpec. Fallback to HiveMapR-DB connector.", e);
}
}
/**
* Helper method which creates a DrillScanRel with native Drill HiveScan.
*/
- private DrillScanRel createNativeScanRel(final DrillScanRel hiveScanRel) throws Exception {
+ private DrillScanRel createNativeScanRel(final DrillScanRel hiveScanRel) {
RelDataTypeFactory typeFactory = hiveScanRel.getCluster().getTypeFactory();
HiveScan hiveScan = (HiveScan) hiveScanRel.getGroupScan();
Map<String, String> parameters = hiveScan.getHiveReadEntry().getHiveTableWrapper().getParameters();
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
index a65a69ea4..a8c789dc9 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
@@ -27,6 +27,7 @@ import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
+import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
import org.apache.calcite.schema.Schema.TableType;
@@ -41,7 +42,6 @@ import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.ops.OptimizerRulesContext;
import org.apache.drill.exec.physical.base.AbstractGroupScan;
-import org.apache.drill.exec.planner.sql.logical.ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan;
import org.apache.drill.exec.planner.sql.logical.ConvertHiveParquetScanToDrillParquetScan;
import org.apache.drill.exec.planner.sql.logical.HivePushPartitionFilterIntoScan;
import org.apache.drill.exec.server.DrillbitContext;
@@ -55,8 +55,7 @@ import org.apache.drill.exec.store.hive.schema.HiveSchemaFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin;
-import org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -217,7 +216,14 @@ public class HiveStoragePlugin extends AbstractStoragePlugin {
ruleBuilder.add(ConvertHiveParquetScanToDrillParquetScan.INSTANCE);
}
if (options.getBoolean(ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER)) {
- ruleBuilder.add(ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan.INSTANCE);
+ try {
+ Class<?> hiveToDrillMapRDBJsonRuleClass =
+ Class.forName("org.apache.drill.exec.planner.sql.logical.ConvertHiveMapRDBJsonScanToDrillMapRDBJsonScan");
+ ruleBuilder.add((StoragePluginOptimizerRule) hiveToDrillMapRDBJsonRuleClass.getField("INSTANCE").get(null));
+ } catch (ReflectiveOperationException e) {
+ logger.warn("Current Drill build is not designed for working with Hive MapR-DB tables. " +
+ "Please disable {} option", ExecConstants.HIVE_OPTIMIZE_MAPRDB_JSON_SCAN_WITH_NATIVE_READER);
+ }
}
return ruleBuilder.build();
}
@@ -225,13 +231,21 @@ public class HiveStoragePlugin extends AbstractStoragePlugin {
@Override
public FormatPlugin getFormatPlugin(FormatPluginConfig formatConfig) {
// TODO: implement formatCreator similar to FileSystemPlugin formatCreator. DRILL-6621
- if (formatConfig instanceof MapRDBFormatPluginConfig) {
- try {
- return new MapRDBFormatPlugin(HIVE_MAPRDB_FORMAT_PLUGIN_NAME, context, hiveConf, config,
- (MapRDBFormatPluginConfig) formatConfig);
- } catch (IOException e) {
- throw new DrillRuntimeException("The error is occurred while connecting to MapR-DB", e);
+ try {
+ Class<?> mapRDBFormatPluginConfigClass =
+ Class.forName("org.apache.drill.exec.store.mapr.db.MapRDBFormatPluginConfig");
+ Class<?> mapRDBFormatPluginClass =
+ Class.forName("org.apache.drill.exec.store.mapr.db.MapRDBFormatPlugin");
+
+ if (mapRDBFormatPluginConfigClass.isInstance(formatConfig)) {
+ return (FormatPlugin) mapRDBFormatPluginClass.getConstructor(
+ new Class[]{String.class, DrillbitContext.class, Configuration.class,
+ StoragePluginConfig.class, mapRDBFormatPluginConfigClass})
+ .newInstance(
+ new Object[]{HIVE_MAPRDB_FORMAT_PLUGIN_NAME, context, hiveConf, config, formatConfig});
}
+ } catch (ReflectiveOperationException e) {
+ throw new DrillRuntimeException("The error is occurred while connecting to MapR-DB or instantiating mapRDBFormatPlugin", e);
}
throw new DrillRuntimeException(String.format("Hive storage plugin doesn't support usage of %s format plugin",
formatConfig.getClass().getName()));