Diffstat (limited to 'contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java')
-rw-r--r--  contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java  | 215
1 file changed, 26 insertions, 189 deletions
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
index 4da22b6a3..1b7774878 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
@@ -31,8 +31,10 @@ import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.joda.time.DateTime;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
import java.math.BigDecimal;
import java.sql.Date;
@@ -42,38 +44,25 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
@Category({SlowTest.class, HiveStorageTest.class})
public class TestHiveStorage extends HiveTestBase {
+
@BeforeClass
- public static void setupOptions() throws Exception {
- test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
+ public static void init() {
+ setSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
}
- @Test // DRILL-4083
- public void testNativeScanWhenNoColumnIsRead() throws Exception {
- try {
- test(String.format("alter session set `%s` = true", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
-
- String query = "SELECT count(*) as col FROM hive.countStar_Parquet";
- testPhysicalPlan(query, "hive-drill-native-parquet-scan");
-
- testBuilder()
- .sqlQuery(query)
- .unOrdered()
- .baselineColumns("col")
- .baselineValues(200L)
- .go();
- } finally {
- test("alter session reset `%s`",
- ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS);
- }
+ @AfterClass
+ public static void cleanup() {
+ resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
}
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
@Test
public void hiveReadWithDb() throws Exception {
test("select * from hive.kv");
@@ -206,123 +195,6 @@ public class TestHiveStorage extends HiveTestBase {
.go();
}
- /**
- * Test to ensure Drill reads the all supported types through native Parquet readers.
- * NOTE: As part of Hive 1.2 upgrade, make sure this test and {@link #readAllSupportedHiveDataTypes()} are merged
- * into one test.
- */
- @Test
- public void readAllSupportedHiveDataTypesNativeParquet() throws Exception {
- try {
- test(String.format("alter session set `%s` = true", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
- final String query = "SELECT * FROM hive.readtest_parquet";
-
- // Make sure the plan has Hive scan with native parquet reader
- testPhysicalPlan(query, "hive-drill-native-parquet-scan");
-
- testBuilder().sqlQuery(query)
- .unOrdered()
- .baselineColumns(
- "binary_field",
- "boolean_field",
- "tinyint_field",
- "decimal0_field",
- "decimal9_field",
- "decimal18_field",
- "decimal28_field",
- "decimal38_field",
- "double_field",
- "float_field",
- "int_field",
- "bigint_field",
- "smallint_field",
- "string_field",
- "varchar_field",
- "timestamp_field",
- "char_field",
- // There is a regression in Hive 1.2.1 in binary and boolean partition columns. Disable for now.
- //"binary_part",
- "boolean_part",
- "tinyint_part",
- "decimal0_part",
- "decimal9_part",
- "decimal18_part",
- "decimal28_part",
- "decimal38_part",
- "double_part",
- "float_part",
- "int_part",
- "bigint_part",
- "smallint_part",
- "string_part",
- "varchar_part",
- "timestamp_part",
- "date_part",
- "char_part")
- .baselineValues(
- "binaryfield".getBytes(),
- false,
- 34,
- new BigDecimal("66"),
- new BigDecimal("2347.92"),
- new BigDecimal("2758725827.99990"),
- new BigDecimal("29375892739852.8"),
- new BigDecimal("89853749534593985.783"),
- 8.345d,
- 4.67f,
- 123456,
- 234235L,
- 3455,
- "stringfield",
- "varcharfield",
- new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
- "charfield",
- // There is a regression in Hive 1.2.1 in binary and boolean partition columns. Disable for now.
- //"binary",
- true,
- 64,
- new BigDecimal("37"),
- new BigDecimal("36.90"),
- new BigDecimal("3289379872.94565"),
- new BigDecimal("39579334534534.4"),
- new BigDecimal("363945093845093890.900"),
- 8.345d,
- 4.67f,
- 123456,
- 234235L,
- 3455,
- "string",
- "varchar",
- new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
- new DateTime(Date.valueOf("2013-07-05").getTime()),
- "char")
- .baselineValues( // All fields are null, but partition fields have non-null values
- null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
- // There is a regression in Hive 1.2.1 in binary and boolean partition columns. Disable for now.
- //"binary",
- true,
- 64,
- new BigDecimal("37"),
- new BigDecimal("36.90"),
- new BigDecimal("3289379872.94565"),
- new BigDecimal("39579334534534.4"),
- new BigDecimal("363945093845093890.900"),
- 8.345d,
- 4.67f,
- 123456,
- 234235L,
- 3455,
- "string",
- "varchar",
- new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
- new DateTime(Date.valueOf("2013-07-05").getTime()),
- "char")
- .go();
- } finally {
- test(String.format("alter session set `%s` = false", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
- }
- }
-
@Test
public void orderByOnHiveTable() throws Exception {
testBuilder()
@@ -402,19 +274,7 @@ public class TestHiveStorage extends HiveTestBase {
.go();
}
- @Test // DRILL-3938
- public void nativeReaderIsDisabledForAlteredPartitionedTable() throws Exception {
- try {
- test(String.format("alter session set `%s` = true", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
- final String query = "EXPLAIN PLAN FOR SELECT key, `value`, newcol FROM hive.kv_parquet ORDER BY key LIMIT 1";
- // Make sure the HiveScan in plan has no native parquet reader
- final String planStr = getPlanInString(query, JSON_FORMAT);
- assertFalse("Hive native is not expected in the plan", planStr.contains("hive-drill-native-parquet-scan"));
- } finally {
- test(String.format("alter session set `%s` = false", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
- }
- }
@Test // DRILL-3739
public void readingFromStorageHandleBasedTable() throws Exception {
@@ -426,22 +286,6 @@ public class TestHiveStorage extends HiveTestBase {
.go();
}
- @Test // DRILL-3739
- public void readingFromStorageHandleBasedTable2() throws Exception {
- try {
- test(String.format("alter session set `%s` = true", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
-
- testBuilder()
- .sqlQuery("SELECT * FROM hive.kv_sh ORDER BY key LIMIT 2")
- .ordered()
- .baselineColumns("key", "value")
- .expectsEmptyResultSet()
- .go();
- } finally {
- test(String.format("alter session set `%s` = false", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));
- }
- }
-
@Test // DRILL-3688
public void readingFromSmallTableWithSkipHeaderAndFooter() throws Exception {
testBuilder()
@@ -480,23 +324,20 @@ public class TestHiveStorage extends HiveTestBase {
.go();
}
- @Test // DRILL-3688
- public void testIncorrectHeaderFooterProperty() throws Exception {
- Map<String, String> testData = ImmutableMap.<String, String>builder()
- .put("hive.skipper.kv_incorrect_skip_header","skip.header.line.count")
- .put("hive.skipper.kv_incorrect_skip_footer", "skip.footer.line.count")
- .build();
-
- String query = "select * from %s";
- String exceptionMessage = "Hive table property %s value 'A' is non-numeric";
-
- for (Map.Entry<String, String> entry : testData.entrySet()) {
- try {
- test(String.format(query, entry.getKey()));
- } catch (UserRemoteException e) {
- assertThat(e.getMessage(), containsString(String.format(exceptionMessage, entry.getValue())));
- }
- }
+ @Test
+ public void testIncorrectHeaderProperty() throws Exception {
+ String query = "select * from hive.skipper.kv_incorrect_skip_header";
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("Hive table property skip.header.line.count value 'A' is non-numeric"));
+ test(query);
+ }
+
+ @Test
+ public void testIncorrectFooterProperty() throws Exception {
+ String query = "select * from hive.skipper.kv_incorrect_skip_footer";
+ thrown.expect(UserRemoteException.class);
+ thrown.expectMessage(containsString("Hive table property skip.footer.line.count value 'A' is non-numeric"));
+ test(query);
}
@Test
@@ -571,6 +412,7 @@ public class TestHiveStorage extends HiveTestBase {
@Test
public void testPhysicalPlanSubmission() throws Exception {
PlanTestBase.testPhysicalPlanExecutionBasedOnQuery("select * from hive.kv");
+ PlanTestBase.testPhysicalPlanExecutionBasedOnQuery("select * from hive.readtest");
}
private void verifyColumnsMetadata(List<UserProtos.ResultColumnMetadata> columnsList, Map<String, Integer> expectedResult) {
@@ -583,9 +425,4 @@ public class TestHiveStorage extends HiveTestBase {
assertTrue("Column should be nullable", columnMetadata.getIsNullable());
}
}
-
- @AfterClass
- public static void shutdownOptions() throws Exception {
- test(String.format("alter session set `%s` = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
- }
}
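
For readers unfamiliar with the JUnit 4 rule the patch switches to, the sketch below shows the ExpectedException pattern in isolation: declare the rule as a public field, arm it inside the test before the statement that should fail, and let JUnit verify both the exception type and the message. The class name and the deliberately failing parseInt call are illustrative stand-ins, not code from the Drill test suite.

// Minimal standalone sketch (not part of the patch) of the JUnit 4 ExpectedException
// pattern used by testIncorrectHeaderProperty/testIncorrectFooterProperty above.
import static org.hamcrest.CoreMatchers.containsString;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionPatternSketch {

  // The rule starts disarmed via none(); it only takes effect once expect(...) is called.
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void failsWithNonNumericValue() {
    // Declare the expectation before the call that should throw; the test passes
    // only if an exception of this type with a matching message is actually raised.
    thrown.expect(NumberFormatException.class);
    thrown.expectMessage(containsString("For input string: \"A\""));
    Integer.parseInt("A"); // illustrative stand-in for running the query against the misconfigured table
  }
}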