about summary refs log tree commit diff
path: root/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
diff options
context:
space:
mode:
Diffstat (limited to 'contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java')
-rw-r--r--contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java82
1 file changed, 61 insertions(+), 21 deletions(-)
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
index 78e5b393a..f20699998 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
@@ -21,6 +21,7 @@ import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
+import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.sql.Date;
import java.sql.Timestamp;
@@ -31,6 +32,7 @@ import com.google.common.collect.Sets;
import com.google.common.io.Resources;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.drill.test.BaseDirTestWatcher;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.exceptions.DrillException;
import org.apache.drill.exec.store.StoragePluginRegistry;
@@ -52,9 +54,11 @@ public class HiveTestDataGenerator {
private final String dbDir;
private final String whDir;
+ private final BaseDirTestWatcher dirTestWatcher;
private final Map<String, String> config;
- public static synchronized HiveTestDataGenerator getInstance(File baseDir) throws Exception {
+ public static synchronized HiveTestDataGenerator getInstance(BaseDirTestWatcher dirTestWatcher) throws Exception {
+ File baseDir = dirTestWatcher.getRootDir();
if (instance == null || !HiveTestDataGenerator.baseDir.equals(baseDir)) {
HiveTestDataGenerator.baseDir = baseDir;
@@ -64,19 +68,20 @@ public class HiveTestDataGenerator {
final String dbDir = dbDirFile.getAbsolutePath();
final String whDir = whDirFile.getAbsolutePath();
- instance = new HiveTestDataGenerator(dbDir, whDir);
+ instance = new HiveTestDataGenerator(dbDir, whDir, dirTestWatcher);
instance.generateTestData();
}
return instance;
}
- private HiveTestDataGenerator(final String dbDir, final String whDir) {
+ private HiveTestDataGenerator(final String dbDir, final String whDir, final BaseDirTestWatcher dirTestWatcher) {
this.dbDir = dbDir;
this.whDir = whDir;
+ this.dirTestWatcher = dirTestWatcher;
config = Maps.newHashMap();
- config.put("hive.metastore.uris", "");
+ config.put(ConfVars.METASTOREURIS.toString(), "");
config.put("javax.jdo.option.ConnectionURL", String.format("jdbc:derby:;databaseName=%s;create=true", dbDir));
config.put("hive.metastore.warehouse.dir", whDir);
config.put(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
@@ -128,7 +133,7 @@ public class HiveTestDataGenerator {
try {
Files.setPosixFilePermissions(dir.toPath(), perms);
} catch (IOException e) {
- new RuntimeException(e);
+ throw new RuntimeException(e);
}
return dir;
@@ -494,22 +499,6 @@ public class HiveTestDataGenerator {
executeQuery(hiveDriver, "INSERT INTO TABLE kv_parquet PARTITION(part1) SELECT key, value, key FROM default.kv");
executeQuery(hiveDriver, "ALTER TABLE kv_parquet ADD COLUMNS (newcol string)");
- executeQuery(hiveDriver,
- "CREATE TABLE countStar_Parquet (int_field INT) STORED AS parquet");
-
- final int numOfRows = 200;
- final StringBuffer sb = new StringBuffer();
- sb.append("VALUES ");
- for(int i = 0; i < numOfRows; ++i) {
- if(i != 0) {
- sb.append(",");
- }
- sb.append("(").append(i).append(")");
- }
-
- executeQuery(hiveDriver, "INSERT INTO TABLE countStar_Parquet \n" +
- sb.toString());
-
// Create a StorageHandler based table (DRILL-3739)
executeQuery(hiveDriver, "CREATE TABLE kv_sh(key INT, value STRING) STORED BY " +
"'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'");
@@ -551,9 +540,60 @@ public class HiveTestDataGenerator {
Resources.getResource("simple.json") + "' into table default.simple_json";
executeQuery(hiveDriver, loadData);
+ createTestDataForDrillNativeParquetReaderTests(hiveDriver);
+
ss.close();
}
+ private void createTestDataForDrillNativeParquetReaderTests(Driver hiveDriver) {
+ // Hive managed table that has data qualified for Drill native filter push down
+ executeQuery(hiveDriver, "create table kv_native(key int, sub_key int) stored as parquet");
+ // each insert is created in separate file
+ executeQuery(hiveDriver, "insert into table kv_native values (1, 1), (1, 2)");
+ executeQuery(hiveDriver, "insert into table kv_native values (1, 3), (1, 4)");
+ executeQuery(hiveDriver, "insert into table kv_native values (2, 5), (2, 6)");
+ executeQuery(hiveDriver, "insert into table kv_native values (null, 9), (null, 10)");
+
+ // Hive external table which has three partitions
+
+ // copy external table with data from test resources
+ dirTestWatcher.copyResourceToRoot(Paths.get("external"));
+
+ File external = new File (baseDir, "external");
+ String tableLocation = new File(external, "kv_native_ext").toURI().getPath();
+
+ executeQuery(hiveDriver, String.format("create external table kv_native_ext(key int) " +
+ "partitioned by (part_key int) " +
+ "stored as parquet location '%s'",
+ tableLocation));
+
+ /*
+ DATA:
+ key, part_key
+ 1, 1
+ 2, 1
+ 3, 2
+ 4, 2
+ */
+
+ // add partitions
+
+ // partition in the same location as table
+ String firstPartition = new File(tableLocation, "part_key=1").toURI().getPath();
+ executeQuery(hiveDriver, String.format("alter table kv_native_ext add partition (part_key = '1') " +
+ "location '%s'", firstPartition));
+
+ // partition in a different location from the table
+ String secondPartition = new File(external, "part_key=2").toURI().getPath();
+ executeQuery(hiveDriver, String.format("alter table kv_native_ext add partition (part_key = '2') " +
+ "location '%s'", secondPartition));
+
+ // add empty partition
+ String thirdPartition = new File(dirTestWatcher.makeSubDir(Paths.get("empty_part")), "part_key=3").toURI().getPath();
+ executeQuery(hiveDriver, String.format("alter table kv_native_ext add partition (part_key = '3') " +
+ "location '%s'", thirdPartition));
+ }
+
private File getTempFile() throws Exception {
return java.nio.file.Files.createTempFile("drill-hive-test", ".txt").toFile();
}