path: root/bigtop-tests
author     Roman Shaposhnik <rvs@apache.org>  2017-03-22 09:49:53 -0700
committer  Roman Shaposhnik <rvs@apache.org>  2017-03-23 10:27:17 -0700
commit     a05d3813f67979f74c0494fb118f98a0264266dc (patch)
tree       4d511c25b36b1b0f06bd9c747a4f974ab05c18c0 /bigtop-tests
parent     0f51fb32531daa6c0b66e6bbd8fc9813023019ba (diff)
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests
IDEA code reformatting
Diffstat (limited to 'bigtop-tests')
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java  | 732
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java  | 173
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java  | 158
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java  | 78
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java  | 364
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java  | 387
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java  | 202
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java  | 814
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java  | 524
-rw-r--r--  bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java  | 396
10 files changed, 1941 insertions, 1887 deletions
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index a8febdbe..77db1b5a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -48,442 +48,444 @@ import java.util.regex.Pattern;
*/
public class ApiExaminer {
- private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
-
- static private Set<String> unloadableClasses;
-
- private List<String> errors;
- private List<String> warnings;
-
- static {
- unloadableClasses = new HashSet<>();
- unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
- unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
- unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
- unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
-
- }
-
- public static void main(String[] args) {
- Options options = new Options();
-
- options.addOption("c", "compare", true,
- "Compare against a spec, argument is the json file containing spec");
- options.addOption("h", "help", false, "You're looking at it");
- options.addOption("j", "jar", true, "Jar to examine");
- options.addOption("p", "prepare-spec", true,
- "Prepare the spec, argument is the directory to write the spec to");
-
- try {
- CommandLine cli = new GnuParser().parse(options, args);
-
- if (cli.hasOption('h')) {
- usage(options);
- return;
- }
-
- if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
- (cli.hasOption('c') && cli.hasOption('p'))) {
- System.err.println("You must choose either -c or -p");
- usage(options);
- return;
- }
-
- if (!cli.hasOption('j')) {
- System.err.println("You must specify the jar to prepare or compare");
- usage(options);
- return;
- }
-
- String jar = cli.getOptionValue('j');
- ApiExaminer examiner = new ApiExaminer();
-
- if (cli.hasOption('c')) {
- examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
- } else if (cli.hasOption('p')) {
- examiner.prepareExpected(jar, cli.getOptionValue('p'));
- }
- } catch (Exception e) {
- System.err.println("Received exception while processing");
- e.printStackTrace();
- }
- }
-
- private static void usage(Options options) {
- HelpFormatter help = new HelpFormatter();
- help.printHelp("api-examiner", options);
-
- }
-
- private ApiExaminer() {
- }
-
- private void prepareExpected(String jarFile, String outputDir) throws IOException,
- ClassNotFoundException {
- JarInfo jarInfo = new JarInfo(jarFile, this);
- jarInfo.dumpToFile(new File(outputDir));
- }
-
- private void compareAgainstStandard(String json, String jarFile) throws IOException,
- ClassNotFoundException {
- errors = new ArrayList<>();
- warnings = new ArrayList<>();
- JarInfo underTest = new JarInfo(jarFile, this);
- JarInfo standard = jarInfoFromFile(new File(json));
- standard.compareAndReport(underTest);
-
- if (errors.size() > 0) {
- System.err.println("Found " + errors.size() + " incompatibilities:");
- for (String error : errors) {
- System.err.println(error);
- }
- }
+ private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+ static private Set<String> unloadableClasses;
+
+ private List<String> errors;
+ private List<String> warnings;
+
+ static {
+ unloadableClasses = new HashSet<>();
+ unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+ unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+ unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+ unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
- if (warnings.size() > 0) {
- System.err.println("Found " + warnings.size() + " possible issues: ");
- for (String warning : warnings) {
- System.err.println(warning);
- }
}
+ public static void main(String[] args) {
+ Options options = new Options();
+
+ options.addOption("c", "compare", true,
+ "Compare against a spec, argument is the json file containing spec");
+ options.addOption("h", "help", false, "You're looking at it");
+ options.addOption("j", "jar", true, "Jar to examine");
+ options.addOption("p", "prepare-spec", true,
+ "Prepare the spec, argument is the directory to write the spec to");
- }
+ try {
+ CommandLine cli = new GnuParser().parse(options, args);
- private JarInfo jarInfoFromFile(File inputFile) throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
- jarInfo.patchUpClassBackPointers(this);
- return jarInfo;
- }
+ if (cli.hasOption('h')) {
+ usage(options);
+ return;
+ }
- private static class JarInfo {
- String name;
- String version;
- ApiExaminer container;
- Map<String, ClassInfo> classes;
+ if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+ (cli.hasOption('c') && cli.hasOption('p'))) {
+ System.err.println("You must choose either -c or -p");
+ usage(options);
+ return;
+ }
- // For use by Jackson
- public JarInfo() {
+ if (!cli.hasOption('j')) {
+ System.err.println("You must specify the jar to prepare or compare");
+ usage(options);
+ return;
+ }
- }
+ String jar = cli.getOptionValue('j');
+ ApiExaminer examiner = new ApiExaminer();
- JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
- this.container = container;
- LOG.info("Processing jar " + jarFile);
- File f = new File(jarFile);
- Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
- Matcher matcher = pattern.matcher(f.getName());
- if (!matcher.matches()) {
- String msg = "Unable to determine name and version from " + f.getName();
- LOG.error(msg);
- throw new RuntimeException(msg);
- }
- name = matcher.group(1);
- version = matcher.group(2);
- classes = new HashMap<>();
-
- JarFile jar = new JarFile(jarFile);
- Enumeration<JarEntry> entries = jar.entries();
- while (entries.hasMoreElements()) {
- String name = entries.nextElement().getName();
- if (name.endsWith(".class")) {
- name = name.substring(0, name.length() - 6);
- name = name.replace('/', '.');
- if (!unloadableClasses.contains(name)) {
- LOG.debug("Processing class " + name);
- Class<?> clazz = Class.forName(name);
- if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
- clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
- classes.put(name, new ClassInfo(this, clazz));
+ if (cli.hasOption('c')) {
+ examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+ } else if (cli.hasOption('p')) {
+ examiner.prepareExpected(jar, cli.getOptionValue('p'));
}
- }
+ } catch (Exception e) {
+ System.err.println("Received exception while processing");
+ e.printStackTrace();
}
- }
}
- public String getName() {
- return name;
- }
+ private static void usage(Options options) {
+ HelpFormatter help = new HelpFormatter();
+ help.printHelp("api-examiner", options);
- public void setName(String name) {
- this.name = name;
}
- public String getVersion() {
- return version;
+ private ApiExaminer() {
}
- public void setVersion(String version) {
- this.version = version;
+ private void prepareExpected(String jarFile, String outputDir) throws IOException,
+ ClassNotFoundException {
+ JarInfo jarInfo = new JarInfo(jarFile, this);
+ jarInfo.dumpToFile(new File(outputDir));
}
- public Map<String, ClassInfo> getClasses() {
- return classes;
+ private void compareAgainstStandard(String json, String jarFile) throws IOException,
+ ClassNotFoundException {
+ errors = new ArrayList<>();
+ warnings = new ArrayList<>();
+ JarInfo underTest = new JarInfo(jarFile, this);
+ JarInfo standard = jarInfoFromFile(new File(json));
+ standard.compareAndReport(underTest);
+
+ if (errors.size() > 0) {
+ System.err.println("Found " + errors.size() + " incompatibilities:");
+ for (String error : errors) {
+ System.err.println(error);
+ }
+ }
+
+ if (warnings.size() > 0) {
+ System.err.println("Found " + warnings.size() + " possible issues: ");
+ for (String warning : warnings) {
+ System.err.println(warning);
+ }
+ }
+
+
}
- public void setClasses(Map<String, ClassInfo> classes) {
- this.classes = classes;
+ private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+ jarInfo.patchUpClassBackPointers(this);
+ return jarInfo;
}
- void compareAndReport(JarInfo underTest) {
- Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
- for (ClassInfo classInfo : classes.values()) {
- if (underTestClasses.contains(classInfo)) {
- classInfo.compareAndReport(underTest.classes.get(classInfo.name));
- underTestClasses.remove(classInfo);
- } else {
- container.errors.add(underTest + " does not contain class " + classInfo);
+ private static class JarInfo {
+ String name;
+ String version;
+ ApiExaminer container;
+ Map<String, ClassInfo> classes;
+
+ // For use by Jackson
+ public JarInfo() {
+
}
- }
- if (underTestClasses.size() > 0) {
- for (ClassInfo extra : underTestClasses) {
- container.warnings.add(underTest + " contains extra class " + extra);
+ JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+ this.container = container;
+ LOG.info("Processing jar " + jarFile);
+ File f = new File(jarFile);
+ Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
+ Matcher matcher = pattern.matcher(f.getName());
+ if (!matcher.matches()) {
+ String msg = "Unable to determine name and version from " + f.getName();
+ LOG.error(msg);
+ throw new RuntimeException(msg);
+ }
+ name = matcher.group(1);
+ version = matcher.group(2);
+ classes = new HashMap<>();
+
+ JarFile jar = new JarFile(jarFile);
+ Enumeration<JarEntry> entries = jar.entries();
+ while (entries.hasMoreElements()) {
+ String name = entries.nextElement().getName();
+ if (name.endsWith(".class")) {
+ name = name.substring(0, name.length() - 6);
+ name = name.replace('/', '.');
+ if (!unloadableClasses.contains(name)) {
+ LOG.debug("Processing class " + name);
+ Class<?> clazz = Class.forName(name);
+ if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+ clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+ classes.put(name, new ClassInfo(this, clazz));
+ }
+ }
+ }
+ }
}
- }
- }
- void dumpToFile(File outputDir) throws IOException {
- File output = new File(outputDir, name + "-" + version + "-api-report.json");
- ObjectMapper mapper = new ObjectMapper();
- mapper.writeValue(output, this);
- }
+ public String getName() {
+ return name;
+ }
- void patchUpClassBackPointers(ApiExaminer container) {
- this.container = container;
- for (ClassInfo classInfo : classes.values()) {
- classInfo.setJar(this);
- classInfo.patchUpBackMethodBackPointers();
- }
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof JarInfo)) return false;
- JarInfo that = (JarInfo)other;
- return name.equals(that.name) && version.equals(that.version);
- }
+ public String getVersion() {
+ return version;
+ }
- @Override
- public String toString() {
- return name + "-" + version;
- }
- }
+ public void setVersion(String version) {
+ this.version = version;
+ }
- private static class ClassInfo {
- @JsonIgnore JarInfo jar;
- String name;
- Map<String, MethodInfo> methods;
+ public Map<String, ClassInfo> getClasses() {
+ return classes;
+ }
- // For use by Jackson
- public ClassInfo() {
+ public void setClasses(Map<String, ClassInfo> classes) {
+ this.classes = classes;
+ }
- }
+ void compareAndReport(JarInfo underTest) {
+ Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+ for (ClassInfo classInfo : classes.values()) {
+ if (underTestClasses.contains(classInfo)) {
+ classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+ underTestClasses.remove(classInfo);
+ } else {
+ container.errors.add(underTest + " does not contain class " + classInfo);
+ }
+ }
- ClassInfo(JarInfo jar, Class<?> clazz) {
- this.jar = jar;
- this.name = clazz.getName();
- methods = new HashMap<>();
+ if (underTestClasses.size() > 0) {
+ for (ClassInfo extra : underTestClasses) {
+ container.warnings.add(underTest + " contains extra class " + extra);
+ }
+ }
+ }
- for (Method method : clazz.getMethods()) {
- if (method.getDeclaringClass().equals(clazz)) {
- LOG.debug("Processing method " + method.getName());
- MethodInfo mi = new MethodInfo(this, method);
- methods.put(mi.toString(), mi);
+ void dumpToFile(File outputDir) throws IOException {
+ File output = new File(outputDir, name + "-" + version + "-api-report.json");
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.writeValue(output, this);
}
- }
- }
- public JarInfo getJar() {
- return jar;
- }
+ void patchUpClassBackPointers(ApiExaminer container) {
+ this.container = container;
+ for (ClassInfo classInfo : classes.values()) {
+ classInfo.setJar(this);
+ classInfo.patchUpBackMethodBackPointers();
+ }
+ }
- public void setJar(JarInfo jar) {
- this.jar = jar;
- }
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof JarInfo)) return false;
+ JarInfo that = (JarInfo) other;
+ return name.equals(that.name) && version.equals(that.version);
+ }
- public String getName() {
- return name;
+ @Override
+ public String toString() {
+ return name + "-" + version;
+ }
}
- public void setName(String name) {
- this.name = name;
- }
+ private static class ClassInfo {
+ @JsonIgnore
+ JarInfo jar;
+ String name;
+ Map<String, MethodInfo> methods;
- public Map<String, MethodInfo> getMethods() {
- return methods;
- }
+ // For use by Jackson
+ public ClassInfo() {
- public void setMethods(Map<String, MethodInfo> methods) {
- this.methods = methods;
- }
+ }
- void compareAndReport(ClassInfo underTest) {
- // Make a copy so we can remove them as we match them, making it easy to find additional ones
- Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
- for (MethodInfo methodInfo : methods.values()) {
- if (underTestMethods.contains(methodInfo)) {
- methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
- underTestMethods.remove(methodInfo);
- } else {
- jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+ ClassInfo(JarInfo jar, Class<?> clazz) {
+ this.jar = jar;
+ this.name = clazz.getName();
+ methods = new HashMap<>();
+
+ for (Method method : clazz.getMethods()) {
+ if (method.getDeclaringClass().equals(clazz)) {
+ LOG.debug("Processing method " + method.getName());
+ MethodInfo mi = new MethodInfo(this, method);
+ methods.put(mi.toString(), mi);
+ }
+ }
}
- }
- if (underTestMethods.size() > 0) {
- for (MethodInfo extra : underTestMethods) {
- jar.container.warnings.add(underTest + " contains extra method " + extra);
+ public JarInfo getJar() {
+ return jar;
}
- }
- }
- void patchUpBackMethodBackPointers() {
- for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
- }
+ public void setJar(JarInfo jar) {
+ this.jar = jar;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof ClassInfo)) return false;
- ClassInfo that = (ClassInfo)other;
- return name.equals(that.name); // Classes can be compared just on names
- }
+ public String getName() {
+ return name;
+ }
- @Override
- public int hashCode() {
- return name.hashCode();
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public String toString() {
- return jar + " " + name;
- }
- }
+ public Map<String, MethodInfo> getMethods() {
+ return methods;
+ }
- private static class MethodInfo {
- @JsonIgnore ClassInfo containingClass;
- String name;
- String returnType;
- List<String> args;
- Set<String> exceptions;
+ public void setMethods(Map<String, MethodInfo> methods) {
+ this.methods = methods;
+ }
- // For use by Jackson
- public MethodInfo() {
+ void compareAndReport(ClassInfo underTest) {
+ // Make a copy so we can remove them as we match them, making it easy to find additional ones
+ Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+ for (MethodInfo methodInfo : methods.values()) {
+ if (underTestMethods.contains(methodInfo)) {
+ methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+ underTestMethods.remove(methodInfo);
+ } else {
+ jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+ }
+ }
- }
+ if (underTestMethods.size() > 0) {
+ for (MethodInfo extra : underTestMethods) {
+ jar.container.warnings.add(underTest + " contains extra method " + extra);
+ }
+ }
+ }
- MethodInfo(ClassInfo containingClass, Method method) {
- this.containingClass = containingClass;
- this.name = method.getName();
- args = new ArrayList<>();
- for (Class<?> argClass : method.getParameterTypes()) {
- args.add(argClass.getName());
- }
- returnType = method.getReturnType().getName();
- exceptions = new HashSet<>();
- for (Class<?> exception : method.getExceptionTypes()) {
- exceptions.add(exception.getName());
- }
- }
+ void patchUpBackMethodBackPointers() {
+ for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+ }
- public ClassInfo getContainingClass() {
- return containingClass;
- }
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof ClassInfo)) return false;
+ ClassInfo that = (ClassInfo) other;
+ return name.equals(that.name); // Classes can be compared just on names
+ }
- public void setContainingClass(ClassInfo containingClass) {
- this.containingClass = containingClass;
- }
+ @Override
+ public int hashCode() {
+ return name.hashCode();
+ }
- public String getName() {
- return name;
+ @Override
+ public String toString() {
+ return jar + " " + name;
+ }
}
- public void setName(String name) {
- this.name = name;
- }
+ private static class MethodInfo {
+ @JsonIgnore
+ ClassInfo containingClass;
+ String name;
+ String returnType;
+ List<String> args;
+ Set<String> exceptions;
- public String getReturnType() {
- return returnType;
- }
+ // For use by Jackson
+ public MethodInfo() {
- public void setReturnType(String returnType) {
- this.returnType = returnType;
- }
+ }
- public List<String> getArgs() {
- return args;
- }
+ MethodInfo(ClassInfo containingClass, Method method) {
+ this.containingClass = containingClass;
+ this.name = method.getName();
+ args = new ArrayList<>();
+ for (Class<?> argClass : method.getParameterTypes()) {
+ args.add(argClass.getName());
+ }
+ returnType = method.getReturnType().getName();
+ exceptions = new HashSet<>();
+ for (Class<?> exception : method.getExceptionTypes()) {
+ exceptions.add(exception.getName());
+ }
+ }
- public void setArgs(List<String> args) {
- this.args = args;
- }
+ public ClassInfo getContainingClass() {
+ return containingClass;
+ }
- public Set<String> getExceptions() {
- return exceptions;
- }
+ public void setContainingClass(ClassInfo containingClass) {
+ this.containingClass = containingClass;
+ }
- public void setExceptions(Set<String> exceptions) {
- this.exceptions = exceptions;
- }
+ public String getName() {
+ return name;
+ }
- void compareAndReport(MethodInfo underTest) {
- // Check to see if they've added or removed exceptions
- // Make a copy so I can remove them as I check them off and easily find any that have been
- // added.
- Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
- for (String exception : exceptions) {
- if (underTest.exceptions.contains(exception)) {
- underTestExceptions.remove(exception);
- } else {
- containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
- underTest.containingClass + "." + name + " removes exception " + exception);
- }
- }
- if (underTestExceptions.size() > 0) {
- for (String underTestException : underTest.exceptions) {
- containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
- underTest.containingClass + "." + name + " adds exception " + underTestException);
- }
- }
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof MethodInfo)) return false;
- MethodInfo that = (MethodInfo)other;
+ public String getReturnType() {
+ return returnType;
+ }
- return containingClass.equals(that.containingClass) && name.equals(that.name) &&
- returnType.equals(that.returnType) && args.equals(that.args);
- }
+ public void setReturnType(String returnType) {
+ this.returnType = returnType;
+ }
- @Override
- public int hashCode() {
- return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
- args.hashCode();
- }
+ public List<String> getArgs() {
+ return args;
+ }
+
+ public void setArgs(List<String> args) {
+ this.args = args;
+ }
- @Override
- public String toString() {
- StringBuilder buf = new StringBuilder(returnType)
- .append(" ")
- .append(name)
- .append('(');
- boolean first = true;
- for (String arg : args) {
- if (first) first = false;
- else buf.append(", ");
- buf.append(arg);
- }
- buf.append(")");
- if (exceptions.size() > 0) {
- buf.append(" throws ");
- first = true;
- for (String exception : exceptions) {
- if (first) first = false;
- else buf.append(", ");
- buf.append(exception);
- }
- }
- return buf.toString();
+ public Set<String> getExceptions() {
+ return exceptions;
+ }
+
+ public void setExceptions(Set<String> exceptions) {
+ this.exceptions = exceptions;
+ }
+
+ void compareAndReport(MethodInfo underTest) {
+ // Check to see if they've added or removed exceptions
+ // Make a copy so I can remove them as I check them off and easily find any that have been
+ // added.
+ Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+ for (String exception : exceptions) {
+ if (underTest.exceptions.contains(exception)) {
+ underTestExceptions.remove(exception);
+ } else {
+ containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+ underTest.containingClass + "." + name + " removes exception " + exception);
+ }
+ }
+ if (underTestExceptions.size() > 0) {
+ for (String underTestException : underTest.exceptions) {
+ containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+ underTest.containingClass + "." + name + " adds exception " + underTestException);
+ }
+ }
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof MethodInfo)) return false;
+ MethodInfo that = (MethodInfo) other;
+
+ return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+ returnType.equals(that.returnType) && args.equals(that.args);
+ }
+
+ @Override
+ public int hashCode() {
+ return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+ args.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder buf = new StringBuilder(returnType)
+ .append(" ")
+ .append(name)
+ .append('(');
+ boolean first = true;
+ for (String arg : args) {
+ if (first) first = false;
+ else buf.append(", ");
+ buf.append(arg);
+ }
+ buf.append(")");
+ if (exceptions.size() > 0) {
+ buf.append(" throws ");
+ first = true;
+ for (String exception : exceptions) {
+ if (first) first = false;
+ else buf.append(", ");
+ buf.append(exception);
+ }
+ }
+ return buf.toString();
+ }
}
- }
}
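
The ApiExaminer class reformatted above is a small command-line tool: -p prepares an expected-API spec from a stable Hadoop jar, -c compares a jar under test against a saved spec, and -j names the jar to examine. A minimal, hypothetical invocation sketch follows; the jar name, directories and spec file name are illustrative assumptions, and the tool expects the examined jar's classes to already be on the classpath:

// Hypothetical usage sketch (not part of this commit); jar name, paths and spec file name are illustrative.
public class ApiExaminerUsageExample {
    public static void main(String[] args) throws Exception {
        // Prepare the expected-API spec from a reference jar; -p writes
        // hadoop-common-2.7.3-api-report.json into the given directory (see dumpToFile above).
        org.odpi.specs.runtime.hadoop.ApiExaminer.main(
                new String[]{"-p", "/tmp/api-specs", "-j", "hadoop-common-2.7.3.jar"});

        // Compare a jar under test against that spec; incompatibilities are reported as errors,
        // extra public/stable classes or methods as warnings.
        org.odpi.specs.runtime.hadoop.ApiExaminer.main(
                new String[]{"-c", "/tmp/api-specs/hadoop-common-2.7.3-api-report.json",
                        "-j", "hadoop-common-2.7.3.jar"});
    }
}
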
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 6456cf2f..ccc15ebe 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -41,97 +41,98 @@ import java.net.URI;
import java.util.StringTokenizer;
public class HCatalogMR extends Configured implements Tool {
- private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
- private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
-
- @Override
- public int run(String[] args) throws Exception {
- String inputTable = null;
- String outputTable = null;
- String inputSchemaStr = null;
- String outputSchemaStr = null;
- for(int i = 0; i < args.length; i++){
- if(args[i].equalsIgnoreCase("-it")){
- inputTable = args[i+1];
- }else if(args[i].equalsIgnoreCase("-ot")){
- outputTable = args[i+1];
- }else if(args[i].equalsIgnoreCase("-is")){
- inputSchemaStr = args[i+1];
- }else if(args[i].equalsIgnoreCase("-os")){
- outputSchemaStr = args[i+1];
+ private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
+ private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
+
+ @Override
+ public int run(String[] args) throws Exception {
+ String inputTable = null;
+ String outputTable = null;
+ String inputSchemaStr = null;
+ String outputSchemaStr = null;
+ for (int i = 0; i < args.length; i++) {
+ if (args[i].equalsIgnoreCase("-it")) {
+ inputTable = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-ot")) {
+ outputTable = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-is")) {
+ inputSchemaStr = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-os")) {
+ outputSchemaStr = args[i + 1];
+ }
}
+
+ Configuration conf = getConf();
+ args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+ conf.set(INPUT_SCHEMA, inputSchemaStr);
+ conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+ Job job = new Job(conf, "bigtop_hcat_test");
+ HCatInputFormat.setInput(job, "default", inputTable);
+
+ job.setInputFormatClass(HCatInputFormat.class);
+ job.setJarByClass(HCatalogMR.class);
+ job.setMapperClass(Map.class);
+ job.setReducerClass(Reduce.class);
+ job.setMapOutputKeyClass(Text.class);
+ job.setMapOutputValueClass(IntWritable.class);
+ job.setOutputKeyClass(WritableComparable.class);
+ job.setOutputValueClass(HCatRecord.class);
+ HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+ HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+ job.setOutputFormatClass(HCatOutputFormat.class);
+
+ return job.waitForCompletion(true) ? 0 : 1;
+
+
}
-
- Configuration conf = getConf();
- args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
- conf.set(INPUT_SCHEMA, inputSchemaStr);
- conf.set(OUTPUT_SCHEMA, outputSchemaStr);
-
- Job job = new Job(conf, "bigtop_hcat_test");
- HCatInputFormat.setInput(job, "default", inputTable);
-
- job.setInputFormatClass(HCatInputFormat.class);
- job.setJarByClass(HCatalogMR.class);
- job.setMapperClass(Map.class);
- job.setReducerClass(Reduce.class);
- job.setMapOutputKeyClass(Text.class);
- job.setMapOutputValueClass(IntWritable.class);
- job.setOutputKeyClass(WritableComparable.class);
- job.setOutputValueClass(HCatRecord.class);
- HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
- HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
- job.setOutputFormatClass(HCatOutputFormat.class);
-
- return job.waitForCompletion(true) ? 0 : 1;
-
-
- }
- public static class Map extends Mapper<WritableComparable,
- HCatRecord, Text, IntWritable> {
- private final static IntWritable one = new IntWritable(1);
- private Text word = new Text();
- private HCatSchema inputSchema = null;
- @Override
- protected void map(WritableComparable key, HCatRecord value, Context context)
- throws IOException, InterruptedException {
- if (inputSchema == null) {
- inputSchema =
- HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
- }
- String line = value.getString("line", inputSchema);
- StringTokenizer tokenizer = new StringTokenizer(line);
- while (tokenizer.hasMoreTokens()) {
- word.set(tokenizer.nextToken());
- context.write(word, one);
- }
+ public static class Map extends Mapper<WritableComparable,
+ HCatRecord, Text, IntWritable> {
+ private final static IntWritable one = new IntWritable(1);
+ private Text word = new Text();
+ private HCatSchema inputSchema = null;
+
+ @Override
+ protected void map(WritableComparable key, HCatRecord value, Context context)
+ throws IOException, InterruptedException {
+ if (inputSchema == null) {
+ inputSchema =
+ HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+ }
+ String line = value.getString("line", inputSchema);
+ StringTokenizer tokenizer = new StringTokenizer(line);
+ while (tokenizer.hasMoreTokens()) {
+ word.set(tokenizer.nextToken());
+ context.write(word, one);
+ }
+ }
}
- }
- public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
- private HCatSchema outputSchema = null;
+ public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+ private HCatSchema outputSchema = null;
- @Override
- protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
- IOException, InterruptedException {
- if (outputSchema == null) {
- outputSchema =
- HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
- }
- int sum = 0;
- for (IntWritable i : values) {
- sum += i.get();
- }
- HCatRecord output = new DefaultHCatRecord(2);
- output.set("word", outputSchema, key);
- output.set("count", outputSchema, sum);
- context.write(null, output);
+ @Override
+ protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+ IOException, InterruptedException {
+ if (outputSchema == null) {
+ outputSchema =
+ HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+ }
+ int sum = 0;
+ for (IntWritable i : values) {
+ sum += i.get();
+ }
+ HCatRecord output = new DefaultHCatRecord(2);
+ output.set("word", outputSchema, key);
+ output.set("count", outputSchema, sum);
+ context.write(null, output);
+ }
}
- }
- public static void main(String[] args) throws Exception {
- int exitCode = ToolRunner.run(new HCatalogMR(), args);
- System.exit(exitCode);
- }
- }
+ public static void main(String[] args) throws Exception {
+ int exitCode = ToolRunner.run(new HCatalogMR(), args);
+ System.exit(exitCode);
+ }
+}
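
HCatalogMR above is a Hadoop Tool that takes -it/-ot (input/output HCatalog table) and -is/-os (input/output schema string) arguments and runs a word-count job between the two tables. A hedged driver sketch, where the table names and schema strings are illustrative assumptions (the column names line, word and count come from the mapper and reducer above):

// Hypothetical driver sketch (not part of this commit); table names and schema strings are illustrative.
public class HCatalogMRExample {
    public static void main(String[] args) throws Exception {
        int rc = org.apache.hadoop.util.ToolRunner.run(
                new org.odpi.specs.runtime.hive.HCatalogMR(),
                new String[]{
                        "-it", "bigtop_hcat_input",     // existing HCatalog table with a string column "line"
                        "-ot", "bigtop_hcat_output",    // existing HCatalog table with "word" and "count" columns
                        "-is", "line:string",           // parsed via HCatSchemaUtils.getHCatSchema
                        "-os", "word:string,count:int"});
        System.exit(rc);
    }
}
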
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index ee20588d..f722d63a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -34,88 +34,88 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class HiveHelper {
-
- private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
- public static Map<String, String> execCommand(CommandLine commandline) {
- return execCommand(commandline, null);
- }
+ private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
- public static Map<String, String> execCommand(CommandLine commandline,
- Map<String, String> envVars) {
-
- System.out.println("Executing command:");
- System.out.println(commandline.toString());
- Map<String, String> env = null;
- Map<String, String> entry = new HashMap<String, String>();
- try {
- env = EnvironmentUtils.getProcEnvironment();
- } catch (IOException e1) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to get process environment: "+ e1.getMessage());
- e1.printStackTrace();
- }
- if (envVars != null) {
- for (String key : envVars.keySet()) {
- env.put(key, envVars.get(key));
- }
- }
+ public static Map<String, String> execCommand(CommandLine commandline) {
+ return execCommand(commandline, null);
+ }
- DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
- ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
- Executor executor = new DefaultExecutor();
- executor.setExitValue(1);
- executor.setWatchdog(watchdog);
- executor.setStreamHandler(streamHandler);
- try {
- executor.execute(commandline, env, resultHandler);
- } catch (ExecuteException e) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString() + e.getMessage());
- e.printStackTrace();
- return entry;
- } catch (IOException e) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString() + e.getMessage());
- e.printStackTrace();
- return entry;
- }
-
- try {
- resultHandler.waitFor();
- /*System.out.println("Command output: "+outputStream.toString());*/
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString());
- return entry;
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
+ public static Map<String, String> execCommand(CommandLine commandline,
+ Map<String, String> envVars) {
+
+ System.out.println("Executing command:");
+ System.out.println(commandline.toString());
+ Map<String, String> env = null;
+ Map<String, String> entry = new HashMap<String, String>();
+ try {
+ env = EnvironmentUtils.getProcEnvironment();
+ } catch (IOException e1) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to get process environment: " + e1.getMessage());
+ e1.printStackTrace();
+ }
+ if (envVars != null) {
+ for (String key : envVars.keySet()) {
+ env.put(key, envVars.get(key));
+ }
+ }
+
+ DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
+ ExecuteWatchdog watchdog = new ExecuteWatchdog(60 * 10000);
+ Executor executor = new DefaultExecutor();
+ executor.setExitValue(1);
+ executor.setWatchdog(watchdog);
+ executor.setStreamHandler(streamHandler);
+ try {
+ executor.execute(commandline, env, resultHandler);
+ } catch (ExecuteException e) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString() + e.getMessage());
+ e.printStackTrace();
+ return entry;
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString() + e.getMessage());
+ e.printStackTrace();
+ return entry;
+ }
+
+ try {
+ resultHandler.waitFor();
+ /*System.out.println("Command output: "+outputStream.toString());*/
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString());
+ return entry;
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
/*System.out.println("Command output: "+outputStream.toString());*/
- LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString());
- e.printStackTrace();
- return entry;
- }
- }
-
- protected static String getProperty(String property, String description) {
- String val = System.getProperty(property);
- if (val == null) {
- throw new RuntimeException("You must set the property " + property + " with " +
- description);
- }
- LOG.debug(description + " is " + val);
- return val;
- }
-
+ LOG.debug("exitValue: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString());
+ e.printStackTrace();
+ return entry;
+ }
+ }
+
+ protected static String getProperty(String property, String description) {
+ String val = System.getProperty(property);
+ if (val == null) {
+ throw new RuntimeException("You must set the property " + property + " with " +
+ description);
+ }
+ LOG.debug(description + " is " + val);
+ return val;
+ }
+
}
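
HiveHelper.execCommand() above runs an external command via commons-exec and returns a map with "exitValue" and "outputStream" entries. A small, hypothetical caller (the hive --version command is only illustrative):

// Hypothetical caller of the helper above.
import java.util.Map;
import org.apache.commons.exec.CommandLine;
import org.odpi.specs.runtime.hive.HiveHelper;

public class HiveHelperExample {
    public static void main(String[] args) {
        Map<String, String> result =
                HiveHelper.execCommand(new CommandLine("hive").addArgument("--version"));
        System.out.println("exit=" + result.get("exitValue"));   // exit code captured by the helper
        System.out.println(result.get("outputStream"));          // combined stdout/stderr of the command
    }
}
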
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 3b3ac51a..35b9a3a6 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -28,52 +28,52 @@ import java.sql.SQLException;
import java.util.Properties;
public class JdbcConnector {
- private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+ private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
- protected static final String URL = "bigtop.test.hive.jdbc.url";
- protected static final String USER = "bigtop.test.hive.jdbc.user";
- protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
- protected static final String LOCATION = "bigtop.test.hive.location";
- protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
- protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
- protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
- protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
- protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
+ protected static final String URL = "bigtop.test.hive.jdbc.url";
+ protected static final String USER = "bigtop.test.hive.jdbc.user";
+ protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
+ protected static final String LOCATION = "bigtop.test.hive.location";
+ protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
+ protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
+ protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
+ protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
+ protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
- protected static Connection conn;
+ protected static Connection conn;
- @BeforeClass
- public static void connectToJdbc() throws SQLException {
- // Assume they've put the URL for the JDBC driver in an environment variable.
- String jdbcUrl = getProperty(URL, "the JDBC URL");
- String jdbcUser = getProperty(USER, "the JDBC user name");
- String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
+ @BeforeClass
+ public static void connectToJdbc() throws SQLException {
+ // Assume they've put the URL for the JDBC driver in an environment variable.
+ String jdbcUrl = getProperty(URL, "the JDBC URL");
+ String jdbcUser = getProperty(USER, "the JDBC user name");
+ String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
- Properties props = new Properties();
- props.put("user", jdbcUser);
- if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
- conn = DriverManager.getConnection(jdbcUrl, props);
- }
+ Properties props = new Properties();
+ props.put("user", jdbcUser);
+ if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
+ conn = DriverManager.getConnection(jdbcUrl, props);
+ }
- @AfterClass
- public static void closeJdbc() throws SQLException {
- if (conn != null) conn.close();
- }
+ @AfterClass
+ public static void closeJdbc() throws SQLException {
+ if (conn != null) conn.close();
+ }
- protected static String getProperty(String property, String description) {
- String val = System.getProperty(property);
- if (val == null) {
- throw new RuntimeException("You must set the property " + property + " with " +
- description);
+ protected static String getProperty(String property, String description) {
+ String val = System.getProperty(property);
+ if (val == null) {
+ throw new RuntimeException("You must set the property " + property + " with " +
+ description);
+ }
+ LOG.debug(description + " is " + val);
+ return val;
}
- LOG.debug(description + " is " + val);
- return val;
- }
- protected static boolean testActive(String property, String description) {
- String val = System.getProperty(property, "true");
- LOG.debug(description + " is " + val);
- return Boolean.valueOf(val);
- }
+ protected static boolean testActive(String property, String description) {
+ String val = System.getProperty(property, "true");
+ LOG.debug(description + " is " + val);
+ return Boolean.valueOf(val);
+ }
}
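
JdbcConnector above opens a shared connection in @BeforeClass from the bigtop.test.hive.jdbc.* system properties and exposes it through the protected conn field, so concrete test classes extend it. A hedged sketch of such a subclass (the class name and query are illustrative):

// Hypothetical test subclass of the connector above; the query is illustrative.
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.junit.Assert;
import org.junit.Test;

public class TestShowDatabases extends org.odpi.specs.runtime.hive.JdbcConnector {
    @Test
    public void defaultDatabaseIsVisible() throws SQLException {
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
            boolean foundDefault = false;
            while (rs.next()) {
                if ("default".equalsIgnoreCase(rs.getString(1))) foundDefault = true;
            }
            Assert.assertTrue("expected the default database to be listed", foundDefault);
        }
    }
}
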
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index bc2ab77f..85d824ef 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -24,178 +24,204 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
+
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Map;
public class TestBeeline {
-
- public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-
- private static final String URL = "bigtop.test.hive.jdbc.url";
- private static final String USER = "bigtop.test.hive.jdbc.user";
- private static final String PASSWD = "bigtop.test.hive.jdbc.password";
-
- private static Map<String, String> results;
- private static String beelineUrl;
- private static String beelineUser;
- private static String beelinePasswd;
-
- //creating beeline base command with username and password as per inputs
- private static CommandLine beelineBaseCommand = new CommandLine("beeline");
-
- @BeforeClass
- public static void initialSetup(){
- TestBeeline.beelineUrl = System.getProperty(URL);
- TestBeeline.beelineUser = System.getProperty(USER);
- TestBeeline.beelinePasswd =System.getProperty(PASSWD);
-
- if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "")
- {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
- }
- else if (beelineUser != null && beelineUser != "")
- {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
- }
- else {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
- }
- LOG.info("URL is " + beelineUrl);
- LOG.info("User is " + beelineUser);
- LOG.info("Passwd is " + beelinePasswd);
- LOG.info("Passwd is null " + (beelinePasswd == null));
- }
-
- @Test
- public void checkBeeline() {
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineConnect(){
- try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); }
- catch (FileNotFoundException e1) {
- e1.printStackTrace();
- }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );
- }
-
- @Test
- public void checkBeelineHelp(){
- results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineQueryExecFromCmdLine(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- }
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
- }
-
- @Test
- public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));
- }
-
- @Test
- public void checkBeelineInitFile() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
- try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
- try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_beeline_init")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));
- }
-
- @Test
- public void checkBeelineHiveVar() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
- try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
- try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));
- }
-
- @Test
- public void checkBeelineFastConnect(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
- }
-
- @Test
- public void checkBeelineVerbose(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineShowHeader(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @AfterClass
- public static void cleanup() throws FileNotFoundException {
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
- }
+
+ public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+
+ private static final String URL = "bigtop.test.hive.jdbc.url";
+ private static final String USER = "bigtop.test.hive.jdbc.user";
+ private static final String PASSWD = "bigtop.test.hive.jdbc.password";
+
+ private static Map<String, String> results;
+ private static String beelineUrl;
+ private static String beelineUser;
+ private static String beelinePasswd;
+
+ //creating beeline base command with username and password as per inputs
+ private static CommandLine beelineBaseCommand = new CommandLine("beeline");
+
+ @BeforeClass
+ public static void initialSetup() {
+ TestBeeline.beelineUrl = System.getProperty(URL);
+ TestBeeline.beelineUser = System.getProperty(USER);
+ TestBeeline.beelinePasswd = System.getProperty(PASSWD);
+
+ if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
+ } else if (beelineUser != null && beelineUser != "") {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+ } else {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
+ }
+ LOG.info("URL is " + beelineUrl);
+ LOG.info("User is " + beelineUser);
+ LOG.info("Passwd is " + beelinePasswd);
+ LOG.info("Passwd is null " + (beelinePasswd == null));
+ }
+
+ @Test
+ public void checkBeeline() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -u FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineConnect() {
+ try (PrintWriter out = new PrintWriter("connect.url")) {
+ out.println("!connect " + beelineUrl + " " + beelineUser + " " + beelinePasswd);
+ out.println("!quit");
+ } catch (FileNotFoundException e1) {
+ e1.printStackTrace();
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline !connect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineHelp() {
+ results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --help FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("display this message") && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineQueryExecFromCmdLine() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ }
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -e FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
+ }
+
+ @Test
+ public void checkBeelineQueryExecFromFile() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-f1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -f FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineInitFile() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-i1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+ out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_beeline_init")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -i FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineHiveVar() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-hv1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv2.sql")) {
+ out.println("CREATE DATABASE ${db};");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv3.sql")) {
+ out.println("DROP DATABASE ${db};");
+ out.println("CREATE DATABASE ${db};");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv4.sql")) {
+ out.println("DROP DATABASE ${db};");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --hivevar FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineFastConnect() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --fastConnect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
+ }
+
+ @Test
+ public void checkBeelineVerbose() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --verbose FAILED." + results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineShowHeader() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --showHeader FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("default") && !consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @AfterClass
+ public static void cleanup() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+ }
}
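
Every Beeline check above follows the same shape: compose an Apache Commons Exec CommandLine, run it through HiveHelper.execCommand, and assert on the captured "outputStream"/"exitValue" entries. A minimal sketch of that shape, assuming the HiveHelper class from this module is on the classpath and the JDBC URL is passed as -Dbigtop.test.hive.jdbc.url (the class name below is illustrative only, not part of the test suite):

    package org.odpi.specs.runtime.hive;

    import java.util.Map;

    import org.apache.commons.exec.CommandLine;
    import org.junit.Assert;
    import org.junit.Test;

    public class BeelineSmokeSketch {
        @Test
        public void showDatabases() {
            // Build the command the same way TestBeeline does, minus the user/password handling.
            CommandLine cmd = new CommandLine("beeline")
                    .addArgument("-u")
                    .addArgument(System.getProperty("bigtop.test.hive.jdbc.url"))
                    .addArgument("-e")
                    .addArgument("SHOW DATABASES;");
            Map<String, String> results = HiveHelper.execCommand(cmd);
            String console = results.get("outputStream").toLowerCase();
            // The default database should be listed and no error/exception text should appear.
            Assert.assertTrue("beeline -e failed:\n" + results.get("outputStream"),
                    console.contains("default") && !console.contains("error") && !console.contains("exception"));
        }
    }
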
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index c55bb92d..2341e9bd 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -28,186 +28,209 @@ import org.junit.AfterClass;
import org.junit.Assert;
public class TestCLI {
-
- static Map<String, String> results;
- static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
-
- @BeforeClass
- public static void setup(){
-
- results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
- Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
- }
-
- @Test
- public void help(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
- //LOG.info(results.get("exitValue"));
- Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
- Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
- Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
- }
-
- @Test
- public void sqlFromCmdLine(){
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void sqlFromFiles() throws FileNotFoundException{
- try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void silent() {
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
- }
-
- @Test
- public void verbose(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
- }
-
- @Test
- public void initialization() throws FileNotFoundException{
- try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void database(){
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void hiveConf(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
- }
-
- @Test
- public void variableSubsitution() throws FileNotFoundException{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));
- Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));
- Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void hiveVar() throws FileNotFoundException{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));
- Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));
- Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @AfterClass
- public static void cleanup(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
- }
-
+
+ static Map<String, String> results;
+ static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
+
+ @BeforeClass
+ public static void setup() {
+
+ results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+ Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+
+ @Test
+ public void help() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+ //LOG.info(results.get("exitValue"));
+ Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+ Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+ Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+ }
+
+ @Test
+ public void sqlFromCmdLine() {
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void sqlFromFiles() throws FileNotFoundException {
+ try (PrintWriter out = new PrintWriter("hive-f1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void silent() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+ }
+
+ @Test
+ public void verbose() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+ }
+
+ @Test
+ public void initialization() throws FileNotFoundException {
+ try (PrintWriter out = new PrintWriter("hive-init1.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-init2.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void database() {
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Non-existent database returned with wrong exit code: " + Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void hiveConf() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+ }
+
+ @Test
+ public void variableSubsitution() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ try (PrintWriter out = new PrintWriter("hive-define.sql")) {
+ out.println("show ${A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+ Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+ Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void hiveVar() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ try (PrintWriter out = new PrintWriter("hive-var.sql")) {
+ out.println("show ${A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '" + db + "' < hive-var.sql", false));
+ Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ try (PrintWriter out = new PrintWriter("hiveconf-var.sql")) {
+ out.println("show ${hiveconf:A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '" + db + "' < hiveconf-var.sql", false));
+ Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @AfterClass
+ public static void cleanup() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+ }
+
}
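
When TestCLI needs stdin redirection (the variable-substitution and hivevar cases), it wraps the hive invocation in /bin/sh -c and disables commons-exec quoting so the redirection and the quoted --hiveconf value reach the shell intact. A minimal sketch of that wrapping, assuming the hive-define.sql file written by the test exists in the working directory (the class name is illustrative only):

    package org.odpi.specs.runtime.hive;

    import java.util.Map;

    import org.apache.commons.exec.CommandLine;

    public class HiveStdinSketch {
        // Same embedded-Derby metastore setting that every TestCLI call passes via --hiveconf.
        private static final String DB =
                "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";

        public static void main(String[] args) {
            // addArgument(..., false) turns off quoting, so the shell sees the '<' redirection.
            Map<String, String> results = HiveHelper.execCommand(new CommandLine("/bin/sh")
                    .addArgument("-c")
                    .addArgument("hive -d A=DATABASES --hiveconf '" + DB + "' < hive-define.sql", false));
            System.out.println("exit=" + results.get("exitValue"));
            System.out.println(results.get("outputStream"));
        }
    }
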
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 8bf7141d..bb4287f9 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -51,108 +51,108 @@ import java.util.Random;
public class TestHCatalog {
- private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
- private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
-
- private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
-
- private static IMetaStoreClient client = null;
- private static HiveConf conf;
- private static HCatSchema inputSchema;
- private static HCatSchema outputSchema;
-
- private Random rand;
-
- @BeforeClass
- public static void connect() throws MetaException {
- if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
- String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
- "Hive conf directory ");
- String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
- "Hadoop conf directory ");
- conf = new HiveConf();
- String fileSep = System.getProperty("file.separator");
- conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
- conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
- client = new HiveMetaStoreClient(conf);
+ private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
+ private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
+
+ private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
+
+ private static IMetaStoreClient client = null;
+ private static HiveConf conf;
+ private static HCatSchema inputSchema;
+ private static HCatSchema outputSchema;
+
+ private Random rand;
+
+ @BeforeClass
+ public static void connect() throws MetaException {
+ if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
+ String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
+ "Hive conf directory ");
+ String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
+ "Hadoop conf directory ");
+ conf = new HiveConf();
+ String fileSep = System.getProperty("file.separator");
+ conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
+ conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
+ client = new HiveMetaStoreClient(conf);
+
+ }
+ }
+
+ @Before
+ public void checkIfActive() {
+ Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
+ rand = new Random();
+ }
+ @Test
+ public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
+ InterruptedException, URISyntaxException {
+ // Create a table to write to
+ final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+ SerDeInfo serde = new SerDeInfo("default_serde",
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+ FieldSchema schema = new FieldSchema("line", "string", "");
+ inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
+ HCatFieldSchema.Type.STRING, schema.getComment())));
+ StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
+ "org.apache.hadoop.mapred.TextInputFormat",
+ "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+ new HashMap<String, String>());
+ Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+
+ final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+ sd = new StorageDescriptor(Arrays.asList(
+ new FieldSchema("word", "string", ""),
+ new FieldSchema("count", "int", "")),
+ null, "org.apache.hadoop.mapred.TextInputFormat",
+ "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+ new HashMap<String, String>());
+ table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+ outputSchema = new HCatSchema(Arrays.asList(
+ new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
+ new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
+
+ // LATER Could I use HCatWriter here and the reader to read it?
+ // Write some stuff into a file in the location of the table
+ table = client.getTable("default", inputTable);
+ String inputFile = table.getSd().getLocation() + "/input";
+ Path inputPath = new Path(inputFile);
+ FileSystem fs = FileSystem.get(conf);
+ FSDataOutputStream out = fs.create(inputPath);
+ out.writeChars("Mary had a little lamb\n");
+ out.writeChars("its fleece was white as snow\n");
+ out.writeChars("and everywhere that Mary went\n");
+ out.writeChars("the lamb was sure to go\n");
+ out.close();
+
+ Map<String, String> env = new HashMap<>();
+ env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
+ Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
+ .addArgument("--service")
+ .addArgument("jar")
+ .addArgument(System.getProperty(JOBJAR))
+ .addArgument(HCatalogMR.class.getName())
+ .addArgument("-it")
+ .addArgument(inputTable)
+ .addArgument("-ot")
+ .addArgument(outputTable)
+ .addArgument("-is")
+ .addArgument(inputSchema.getSchemaAsTypeString())
+ .addArgument("-os")
+ .addArgument(outputSchema.getSchemaAsTypeString()), env);
+ LOG.info(results.toString());
+ Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
+
+ client.dropTable("default", inputTable);
+ client.dropTable("default", outputTable);
}
- }
-
- @Before
- public void checkIfActive() {
- Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
- rand = new Random();
- }
-
- @Test
- public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
- InterruptedException, URISyntaxException {
- // Create a table to write to
- final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
- SerDeInfo serde = new SerDeInfo("default_serde",
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
- FieldSchema schema = new FieldSchema("line", "string", "");
- inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
- HCatFieldSchema.Type.STRING, schema.getComment())));
- StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
- "org.apache.hadoop.mapred.TextInputFormat",
- "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
- new HashMap<String, String>());
- Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
-
- final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
- sd = new StorageDescriptor(Arrays.asList(
- new FieldSchema("word", "string", ""),
- new FieldSchema("count", "int", "")),
- null, "org.apache.hadoop.mapred.TextInputFormat",
- "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
- new HashMap<String, String>());
- table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
- outputSchema = new HCatSchema(Arrays.asList(
- new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
- new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
-
- // LATER Could I use HCatWriter here and the reader to read it?
- // Write some stuff into a file in the location of the table
- table = client.getTable("default", inputTable);
- String inputFile = table.getSd().getLocation() + "/input";
- Path inputPath = new Path(inputFile);
- FileSystem fs = FileSystem.get(conf);
- FSDataOutputStream out = fs.create(inputPath);
- out.writeChars("Mary had a little lamb\n");
- out.writeChars("its fleece was white as snow\n");
- out.writeChars("and everywhere that Mary went\n");
- out.writeChars("the lamb was sure to go\n");
- out.close();
-
- Map<String, String> env = new HashMap<>();
- env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
- Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
- .addArgument("--service")
- .addArgument("jar")
- .addArgument(System.getProperty(JOBJAR))
- .addArgument(HCatalogMR.class.getName())
- .addArgument("-it")
- .addArgument(inputTable)
- .addArgument("-ot")
- .addArgument(outputTable)
- .addArgument("-is")
- .addArgument(inputSchema.getSchemaAsTypeString())
- .addArgument("-os")
- .addArgument(outputSchema.getSchemaAsTypeString()), env);
- LOG.info(results.toString());
- Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
-
- client.dropTable("default", inputTable);
- client.dropTable("default", outputTable);
- }
}
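
The HCatalog check above drives a full MapReduce round trip: it creates input and output tables through the metastore client, writes a small text file into the input table's location, then launches HCatalogMR through "hive --service jar" with the hcatalog core jar on HADOOP_CLASSPATH. A minimal sketch of just the launch step, assuming the two system properties defined in the test are set and the tables already exist; the table names and schema strings here are placeholders (the test derives the schema strings from HCatSchema.getSchemaAsTypeString()):

    package org.odpi.specs.runtime.hive;

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.exec.CommandLine;

    public class HCatJobLaunchSketch {
        public static void main(String[] args) {
            Map<String, String> env = new HashMap<>();
            // Make the hcatalog core jar visible to the launched job.
            env.put("HADOOP_CLASSPATH", System.getProperty("bigtop.test.hive.hcat.core.jar", ""));
            Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
                    .addArgument("--service").addArgument("jar")
                    .addArgument(System.getProperty("bigtop.test.hive.hcat.job.jar"))
                    .addArgument(HCatalogMR.class.getName())
                    .addArgument("-it").addArgument("bigtop_hcat_input_table")
                    .addArgument("-ot").addArgument("bigtop_hcat_output_table")
                    .addArgument("-is").addArgument("line:string")
                    .addArgument("-os").addArgument("word:string,count:int"), env);
            System.out.println("exit=" + results.get("exitValue"));
        }
    }
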
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
index a5a896a8..63566400 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -32,514 +32,516 @@ import java.sql.Statement;
import java.sql.Types;
public class TestJdbc extends JdbcConnector {
- private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
-
- /**
- * Test simple non-statement related class. setSchema is tested elsewhere because there's work
- * to do for that one. Similarly with getMetadata.
- * @throws SQLException
- */
- @Test
- public void nonStatementCalls() throws SQLException {
- conn.clearWarnings();
-
- boolean isAutoCommit = conn.getAutoCommit();
- LOG.debug("Auto commit is " + isAutoCommit);
-
- String catalog = conn.getCatalog();
- LOG.debug("Catalog is " + catalog);
-
- String schema = conn.getSchema();
- LOG.debug("Schema is " + schema);
-
- int txnIsolation = conn.getTransactionIsolation();
- LOG.debug("Transaction Isolation is " + txnIsolation);
-
- SQLWarning warning = conn.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
+ private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
+
+ /**
+ * Test simple non-statement-related calls. setSchema is tested elsewhere because there's work
+ * to do for that one. Similarly with getMetadata.
+ *
+ * @throws SQLException
+ */
+ @Test
+ public void nonStatementCalls() throws SQLException {
+ conn.clearWarnings();
+
+ boolean isAutoCommit = conn.getAutoCommit();
+ LOG.debug("Auto commit is " + isAutoCommit);
+
+ String catalog = conn.getCatalog();
+ LOG.debug("Catalog is " + catalog);
+
+ String schema = conn.getSchema();
+ LOG.debug("Schema is " + schema);
+
+ int txnIsolation = conn.getTransactionIsolation();
+ LOG.debug("Transaction Isolation is " + txnIsolation);
+
+ SQLWarning warning = conn.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
+
+ boolean closed = conn.isClosed();
+ LOG.debug("Is closed? " + closed);
+
+ boolean readOnly = conn.isReadOnly();
+ LOG.debug("Is read only?" + readOnly);
+
+ // Hive doesn't support catalogs, so setting this to an arbitrary value should be fine. If
+ // non-Hive systems ever run this test, setting it to an invalid catalog name may cause
+ // issues, so this value may need to be made configurable.
+ conn.setCatalog("fred");
}
- boolean closed = conn.isClosed();
- LOG.debug("Is closed? " + closed);
+ /**
+ * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call
+ * that on a valid table. Same with getFunctions.
+ *
+ * @throws SQLException
+ */
+ @Test
+ public void databaseMetaDataCalls() throws SQLException {
+ DatabaseMetaData md = conn.getMetaData();
- boolean readOnly = conn.isReadOnly();
- LOG.debug("Is read only?" + readOnly);
+ boolean boolrc = md.allTablesAreSelectable();
+ LOG.debug("All tables are selectable? " + boolrc);
- // Hive doesn't support catalogs, so setting this to whatever should be fine. If we have
- // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause
- // issues, so we may need to make this value configurable or something.
- conn.setCatalog("fred");
- }
-
- /**
- * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call
- * that on a valid table. Same with getFunctions.
- * @throws SQLException
- */
- @Test
- public void databaseMetaDataCalls() throws SQLException {
- DatabaseMetaData md = conn.getMetaData();
-
- boolean boolrc = md.allTablesAreSelectable();
- LOG.debug("All tables are selectable? " + boolrc);
-
- String strrc = md.getCatalogSeparator();
- LOG.debug("Catalog separator " + strrc);
-
- strrc = md.getCatalogTerm();
- LOG.debug("Catalog term " + strrc);
-
- ResultSet rs = md.getCatalogs();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found catalog " + strrc);
- }
+ String strrc = md.getCatalogSeparator();
+ LOG.debug("Catalog separator " + strrc);
- Connection c = md.getConnection();
+ strrc = md.getCatalogTerm();
+ LOG.debug("Catalog term " + strrc);
- int intrc = md.getDatabaseMajorVersion();
- LOG.debug("DB major version is " + intrc);
+ ResultSet rs = md.getCatalogs();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found catalog " + strrc);
+ }
- intrc = md.getDatabaseMinorVersion();
- LOG.debug("DB minor version is " + intrc);
+ Connection c = md.getConnection();
- strrc = md.getDatabaseProductName();
- LOG.debug("DB product name is " + strrc);
+ int intrc = md.getDatabaseMajorVersion();
+ LOG.debug("DB major version is " + intrc);
- strrc = md.getDatabaseProductVersion();
- LOG.debug("DB product version is " + strrc);
+ intrc = md.getDatabaseMinorVersion();
+ LOG.debug("DB minor version is " + intrc);
- intrc = md.getDefaultTransactionIsolation();
- LOG.debug("Default transaction isolation is " + intrc);
+ strrc = md.getDatabaseProductName();
+ LOG.debug("DB product name is " + strrc);
- intrc = md.getDriverMajorVersion();
- LOG.debug("Driver major version is " + intrc);
+ strrc = md.getDatabaseProductVersion();
+ LOG.debug("DB product version is " + strrc);
- intrc = md.getDriverMinorVersion();
- LOG.debug("Driver minor version is " + intrc);
+ intrc = md.getDefaultTransactionIsolation();
+ LOG.debug("Default transaction isolation is " + intrc);
- strrc = md.getDriverName();
- LOG.debug("Driver name is " + strrc);
+ intrc = md.getDriverMajorVersion();
+ LOG.debug("Driver major version is " + intrc);
- strrc = md.getDriverVersion();
- LOG.debug("Driver version is " + strrc);
+ intrc = md.getDriverMinorVersion();
+ LOG.debug("Driver minor version is " + intrc);
- strrc = md.getExtraNameCharacters();
- LOG.debug("Extra name characters is " + strrc);
+ strrc = md.getDriverName();
+ LOG.debug("Driver name is " + strrc);
- strrc = md.getIdentifierQuoteString();
- LOG.debug("Identifier quote string is " + strrc);
+ strrc = md.getDriverVersion();
+ LOG.debug("Driver version is " + strrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getImportedKeys("a", "b", "d");
+ strrc = md.getExtraNameCharacters();
+ LOG.debug("Extra name characters is " + strrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getIndexInfo("a", "b", "d", true, true);
+ strrc = md.getIdentifierQuoteString();
+ LOG.debug("Identifier quote string is " + strrc);
- intrc = md.getJDBCMajorVersion();
- LOG.debug("JDBC major version is " + intrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getImportedKeys("a", "b", "d");
- intrc = md.getJDBCMinorVersion();
- LOG.debug("JDBC minor version is " + intrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getIndexInfo("a", "b", "d", true, true);
- intrc = md.getMaxColumnNameLength();
- LOG.debug("Maximum column name length is " + intrc);
+ intrc = md.getJDBCMajorVersion();
+ LOG.debug("JDBC major version is " + intrc);
- strrc = md.getNumericFunctions();
- LOG.debug("Numeric functions are " + strrc);
+ intrc = md.getJDBCMinorVersion();
+ LOG.debug("JDBC minor version is " + intrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getPrimaryKeys("a", "b", "d");
+ intrc = md.getMaxColumnNameLength();
+ LOG.debug("Maximum column name length is " + intrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getProcedureColumns("a", "b", "d", "e");
+ strrc = md.getNumericFunctions();
+ LOG.debug("Numeric functions are " + strrc);
- strrc = md.getProcedureTerm();
- LOG.debug("Procedures are called " + strrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getPrimaryKeys("a", "b", "d");
- // In Hive 1.2 this always returns an empty RS
- rs = md.getProcedures("a", "b", "d");
-
- strrc = md.getSchemaTerm();
- LOG.debug("Schemas are called " + strrc);
-
- rs = md.getSchemas();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found schema " + strrc);
- }
-
- strrc = md.getSearchStringEscape();
- LOG.debug("Search string escape is " + strrc);
-
- strrc = md.getStringFunctions();
- LOG.debug("String functions are " + strrc);
-
- strrc = md.getSystemFunctions();
- LOG.debug("System functions are " + strrc);
-
- rs = md.getTableTypes();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found table type " + strrc);
- }
-
- strrc = md.getTimeDateFunctions();
- LOG.debug("Time/date functions are " + strrc);
-
- rs = md.getTypeInfo();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found type " + strrc);
- }
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getProcedureColumns("a", "b", "d", "e");
- // In Hive 1.2 this always returns an empty RS
- rs = md.getUDTs("a", "b", "d", null);
+ strrc = md.getProcedureTerm();
+ LOG.debug("Procedures are called " + strrc);
- boolrc = md.supportsAlterTableWithAddColumn();
- LOG.debug("Supports alter table with add column? " + boolrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getProcedures("a", "b", "d");
- boolrc = md.supportsAlterTableWithDropColumn();
- LOG.debug("Supports alter table with drop column? " + boolrc);
+ strrc = md.getSchemaTerm();
+ LOG.debug("Schemas are called " + strrc);
- boolrc = md.supportsBatchUpdates();
- LOG.debug("Supports batch updates? " + boolrc);
+ rs = md.getSchemas();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found schema " + strrc);
+ }
- boolrc = md.supportsCatalogsInDataManipulation();
- LOG.debug("Supports catalogs in data manipulation? " + boolrc);
+ strrc = md.getSearchStringEscape();
+ LOG.debug("Search string escape is " + strrc);
- boolrc = md.supportsCatalogsInIndexDefinitions();
- LOG.debug("Supports catalogs in index definition? " + boolrc);
+ strrc = md.getStringFunctions();
+ LOG.debug("String functions are " + strrc);
- boolrc = md.supportsCatalogsInPrivilegeDefinitions();
- LOG.debug("Supports catalogs in privilege definition? " + boolrc);
+ strrc = md.getSystemFunctions();
+ LOG.debug("System functions are " + strrc);
- boolrc = md.supportsCatalogsInProcedureCalls();
- LOG.debug("Supports catalogs in procedure calls? " + boolrc);
+ rs = md.getTableTypes();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found table type " + strrc);
+ }
- boolrc = md.supportsCatalogsInTableDefinitions();
- LOG.debug("Supports catalogs in table definition? " + boolrc);
+ strrc = md.getTimeDateFunctions();
+ LOG.debug("Time/date functions are " + strrc);
- boolrc = md.supportsColumnAliasing();
- LOG.debug("Supports column aliasing? " + boolrc);
+ rs = md.getTypeInfo();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found type " + strrc);
+ }
- boolrc = md.supportsFullOuterJoins();
- LOG.debug("Supports full outer joins? " + boolrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getUDTs("a", "b", "d", null);
- boolrc = md.supportsGroupBy();
- LOG.debug("Supports group by? " + boolrc);
+ boolrc = md.supportsAlterTableWithAddColumn();
+ LOG.debug("Supports alter table with add column? " + boolrc);
- boolrc = md.supportsLimitedOuterJoins();
- LOG.debug("Supports limited outer joins? " + boolrc);
+ boolrc = md.supportsAlterTableWithDropColumn();
+ LOG.debug("Supports alter table with drop column? " + boolrc);
- boolrc = md.supportsMultipleResultSets();
- LOG.debug("Supports limited outer joins? " + boolrc);
+ boolrc = md.supportsBatchUpdates();
+ LOG.debug("Supports batch updates? " + boolrc);
- boolrc = md.supportsNonNullableColumns();
- LOG.debug("Supports non-nullable columns? " + boolrc);
+ boolrc = md.supportsCatalogsInDataManipulation();
+ LOG.debug("Supports catalogs in data manipulation? " + boolrc);
- boolrc = md.supportsOuterJoins();
- LOG.debug("Supports outer joins? " + boolrc);
+ boolrc = md.supportsCatalogsInIndexDefinitions();
+ LOG.debug("Supports catalogs in index definition? " + boolrc);
- boolrc = md.supportsPositionedDelete();
- LOG.debug("Supports positioned delete? " + boolrc);
+ boolrc = md.supportsCatalogsInPrivilegeDefinitions();
+ LOG.debug("Supports catalogs in privilege definition? " + boolrc);
- boolrc = md.supportsPositionedUpdate();
- LOG.debug("Supports positioned update? " + boolrc);
+ boolrc = md.supportsCatalogsInProcedureCalls();
+ LOG.debug("Supports catalogs in procedure calls? " + boolrc);
- boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
- LOG.debug("Supports result set holdability? " + boolrc);
+ boolrc = md.supportsCatalogsInTableDefinitions();
+ LOG.debug("Supports catalogs in table definition? " + boolrc);
- boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
- LOG.debug("Supports result set type? " + boolrc);
+ boolrc = md.supportsColumnAliasing();
+ LOG.debug("Supports column aliasing? " + boolrc);
- boolrc = md.supportsSavepoints();
- LOG.debug("Supports savepoints? " + boolrc);
+ boolrc = md.supportsFullOuterJoins();
+ LOG.debug("Supports full outer joins? " + boolrc);
- boolrc = md.supportsSchemasInDataManipulation();
- LOG.debug("Supports schemas in data manipulation? " + boolrc);
+ boolrc = md.supportsGroupBy();
+ LOG.debug("Supports group by? " + boolrc);
- boolrc = md.supportsSchemasInIndexDefinitions();
- LOG.debug("Supports schemas in index definitions? " + boolrc);
+ boolrc = md.supportsLimitedOuterJoins();
+ LOG.debug("Supports limited outer joins? " + boolrc);
- boolrc = md.supportsSchemasInPrivilegeDefinitions();
- LOG.debug("Supports schemas in privilege definitions? " + boolrc);
+ boolrc = md.supportsMultipleResultSets();
+        LOG.debug("Supports multiple result sets? " + boolrc);
- boolrc = md.supportsSchemasInProcedureCalls();
- LOG.debug("Supports schemas in procedure calls? " + boolrc);
+ boolrc = md.supportsNonNullableColumns();
+ LOG.debug("Supports non-nullable columns? " + boolrc);
- boolrc = md.supportsSchemasInTableDefinitions();
- LOG.debug("Supports schemas in table definitions? " + boolrc);
+ boolrc = md.supportsOuterJoins();
+ LOG.debug("Supports outer joins? " + boolrc);
- boolrc = md.supportsSelectForUpdate();
- LOG.debug("Supports select for update? " + boolrc);
+ boolrc = md.supportsPositionedDelete();
+ LOG.debug("Supports positioned delete? " + boolrc);
- boolrc = md.supportsStoredProcedures();
- LOG.debug("Supports stored procedures? " + boolrc);
+ boolrc = md.supportsPositionedUpdate();
+ LOG.debug("Supports positioned update? " + boolrc);
- boolrc = md.supportsTransactions();
- LOG.debug("Supports transactions? " + boolrc);
+ boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+ LOG.debug("Supports result set holdability? " + boolrc);
- boolrc = md.supportsUnion();
- LOG.debug("Supports union? " + boolrc);
+        boolrc = md.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE);
+        LOG.debug("Supports scroll-insensitive result sets? " + boolrc);
- boolrc = md.supportsUnionAll();
- LOG.debug("Supports union all? " + boolrc);
+ boolrc = md.supportsSavepoints();
+ LOG.debug("Supports savepoints? " + boolrc);
- }
+ boolrc = md.supportsSchemasInDataManipulation();
+ LOG.debug("Supports schemas in data manipulation? " + boolrc);
- @Test
- public void setSchema() throws SQLException {
- try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
- ResultSet.CONCUR_READ_ONLY)) {
+ boolrc = md.supportsSchemasInIndexDefinitions();
+ LOG.debug("Supports schemas in index definitions? " + boolrc);
- final String dbName = "bigtop_jdbc_test_db";
+ boolrc = md.supportsSchemasInPrivilegeDefinitions();
+ LOG.debug("Supports schemas in privilege definitions? " + boolrc);
- final String tableName = "bigtop_jdbc_test_table";
- stmt.execute("drop table if exists " + tableName);
+ boolrc = md.supportsSchemasInProcedureCalls();
+ LOG.debug("Supports schemas in procedure calls? " + boolrc);
- stmt.execute("drop database if exists " + dbName + " cascade");
- stmt.execute("create database " + dbName);
+ boolrc = md.supportsSchemasInTableDefinitions();
+ LOG.debug("Supports schemas in table definitions? " + boolrc);
- conn.setSchema(dbName);
+ boolrc = md.supportsSelectForUpdate();
+ LOG.debug("Supports select for update? " + boolrc);
- DatabaseMetaData md = conn.getMetaData();
+ boolrc = md.supportsStoredProcedures();
+ LOG.debug("Supports stored procedures? " + boolrc);
- ResultSet rs = md.getSchemas(null, dbName);
+ boolrc = md.supportsTransactions();
+ LOG.debug("Supports transactions? " + boolrc);
- while (rs.next()) {
- String schemaName = rs.getString(2);
- LOG.debug("Schema name is " + schemaName);
- }
+ boolrc = md.supportsUnion();
+ LOG.debug("Supports union? " + boolrc);
- stmt.execute("create table " + tableName + " (i int, s varchar(32))");
+ boolrc = md.supportsUnionAll();
+ LOG.debug("Supports union all? " + boolrc);
- rs = md.getTables(null, dbName, tableName, null);
- while (rs.next()) {
- String tName = rs.getString(3);
- LOG.debug("Schema name is " + tName);
- }
-
- rs = md.getColumns(null, dbName, tableName, "i");
- while (rs.next()) {
- String colName = rs.getString(4);
- LOG.debug("Schema name is " + colName);
- }
-
- rs = md.getFunctions(null, dbName, "foo");
- while (rs.next()) {
- String funcName = rs.getString(3);
- LOG.debug("Schema name is " + funcName);
- }
- }
- }
-
- @Test
- public void statement() throws SQLException {
- try (Statement stmt = conn.createStatement()) {
- stmt.cancel();
}
- try (Statement stmt = conn.createStatement()) {
- stmt.clearWarnings();
-
- final String tableName = "bigtop_jdbc_statement_test_table";
-
- stmt.execute("drop table if exists " + tableName);
- stmt.execute("create table " + tableName + " (a int, b varchar(32))");
-
- stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
-
- int intrc = stmt.getUpdateCount();
- LOG.debug("Update count is " + intrc);
-
- ResultSet rs = stmt.executeQuery("select * from " + tableName);
- while (rs.next()) {
- LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
- }
-
- Connection localConn = stmt.getConnection();
-
- intrc = stmt.getFetchDirection();
- LOG.debug("Fetch direction is " + intrc);
+ @Test
+ public void setSchema() throws SQLException {
+ try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+ ResultSet.CONCUR_READ_ONLY)) {
- intrc = stmt.getFetchSize();
- LOG.debug("Fetch size is " + intrc);
+ final String dbName = "bigtop_jdbc_test_db";
- intrc = stmt.getMaxRows();
- LOG.debug("max rows is " + intrc);
+ final String tableName = "bigtop_jdbc_test_table";
+ stmt.execute("drop table if exists " + tableName);
- boolean boolrc = stmt.getMoreResults();
- LOG.debug("more results is " + boolrc);
+ stmt.execute("drop database if exists " + dbName + " cascade");
+ stmt.execute("create database " + dbName);
- intrc = stmt.getQueryTimeout();
- LOG.debug("query timeout is " + intrc);
+ conn.setSchema(dbName);
- stmt.execute("select * from " + tableName);
- rs = stmt.getResultSet();
- while (rs.next()) {
- LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
- }
+ DatabaseMetaData md = conn.getMetaData();
- intrc = stmt.getResultSetType();
- LOG.debug("result set type is " + intrc);
+ ResultSet rs = md.getSchemas(null, dbName);
- SQLWarning warning = stmt.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
- }
+ while (rs.next()) {
+ String schemaName = rs.getString(2);
+ LOG.debug("Schema name is " + schemaName);
+ }
- boolrc = stmt.isClosed();
- LOG.debug("is closed " + boolrc);
+ stmt.execute("create table " + tableName + " (i int, s varchar(32))");
- boolrc = stmt.isCloseOnCompletion();
- LOG.debug("is close on completion " + boolrc);
+ rs = md.getTables(null, dbName, tableName, null);
+ while (rs.next()) {
+ String tName = rs.getString(3);
+                LOG.debug("Table name is " + tName);
+ }
- boolrc = stmt.isPoolable();
- LOG.debug("is poolable " + boolrc);
-
- stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
- stmt.setFetchSize(500);
- stmt.setMaxRows(500);
- }
- }
-
- @Test
- public void preparedStmtAndResultSet() throws SQLException {
- final String tableName = "bigtop_jdbc_psars_test_table";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + tableName);
- stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
- "i int, lo bigint, sh smallint, st varchar(32))");
- }
-
- // NOTE Hive 1.2 theoretically support binary, Date & Timestamp in JDBC, but I get errors when I
- // try to put them in the query.
- try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
- " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
- ps.setBoolean(1, true);
- ps.setByte(2, (byte)1);
- ps.setDouble(3, 3.141592654);
- ps.setFloat(4, 3.14f);
- ps.setInt(5, 3);
- ps.setLong(6, 10L);
- ps.setShort(7, (short)20);
- ps.setString(8, "abc");
- ps.executeUpdate();
- }
-
- try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
- "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
- ps.setNull(1, Types.INTEGER);
- ps.setObject(2, "mary had a little lamb");
- ps.executeUpdate();
- ps.setNull(1, Types.INTEGER, null);
- ps.setString(2, "its fleece was white as snow");
- ps.clearParameters();
- ps.setNull(1, Types.INTEGER, null);
- ps.setString(2, "its fleece was white as snow");
- ps.execute();
+ rs = md.getColumns(null, dbName, tableName, "i");
+ while (rs.next()) {
+ String colName = rs.getString(4);
+                LOG.debug("Column name is " + colName);
+ }
+ rs = md.getFunctions(null, dbName, "foo");
+ while (rs.next()) {
+ String funcName = rs.getString(3);
+                LOG.debug("Function name is " + funcName);
+ }
+ }
}
- try (Statement stmt = conn.createStatement()) {
+ @Test
+ public void statement() throws SQLException {
+ try (Statement stmt = conn.createStatement()) {
+ stmt.cancel();
+ }
- ResultSet rs = stmt.executeQuery("select * from " + tableName);
+ try (Statement stmt = conn.createStatement()) {
+ stmt.clearWarnings();
- ResultSetMetaData md = rs.getMetaData();
+ final String tableName = "bigtop_jdbc_statement_test_table";
- int colCnt = md.getColumnCount();
- LOG.debug("Column count is " + colCnt);
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName + " (a int, b varchar(32))");
- for (int i = 1; i <= colCnt; i++) {
- LOG.debug("Looking at column " + i);
- String strrc = md.getColumnClassName(i);
- LOG.debug("Column class name is " + strrc);
+ stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
- int intrc = md.getColumnDisplaySize(i);
- LOG.debug("Column display size is " + intrc);
+ int intrc = stmt.getUpdateCount();
+ LOG.debug("Update count is " + intrc);
- strrc = md.getColumnLabel(i);
- LOG.debug("Column label is " + strrc);
+ ResultSet rs = stmt.executeQuery("select * from " + tableName);
+ while (rs.next()) {
+ LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+ }
- strrc = md.getColumnName(i);
- LOG.debug("Column name is " + strrc);
+ Connection localConn = stmt.getConnection();
- intrc = md.getColumnType(i);
- LOG.debug("Column type is " + intrc);
+ intrc = stmt.getFetchDirection();
+ LOG.debug("Fetch direction is " + intrc);
- strrc = md.getColumnTypeName(i);
- LOG.debug("Column type name is " + strrc);
+ intrc = stmt.getFetchSize();
+ LOG.debug("Fetch size is " + intrc);
- intrc = md.getPrecision(i);
- LOG.debug("Precision is " + intrc);
+ intrc = stmt.getMaxRows();
+ LOG.debug("max rows is " + intrc);
- intrc = md.getScale(i);
- LOG.debug("Scale is " + intrc);
+ boolean boolrc = stmt.getMoreResults();
+ LOG.debug("more results is " + boolrc);
- boolean boolrc = md.isAutoIncrement(i);
- LOG.debug("Is auto increment? " + boolrc);
+ intrc = stmt.getQueryTimeout();
+ LOG.debug("query timeout is " + intrc);
- boolrc = md.isCaseSensitive(i);
- LOG.debug("Is case sensitive? " + boolrc);
+ stmt.execute("select * from " + tableName);
+ rs = stmt.getResultSet();
+ while (rs.next()) {
+ LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+ }
- boolrc = md.isCurrency(i);
- LOG.debug("Is currency? " + boolrc);
+ intrc = stmt.getResultSetType();
+ LOG.debug("result set type is " + intrc);
- intrc = md.getScale(i);
- LOG.debug("Scale is " + intrc);
+ SQLWarning warning = stmt.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
- intrc = md.isNullable(i);
- LOG.debug("Is nullable? " + intrc);
+ boolrc = stmt.isClosed();
+ LOG.debug("is closed " + boolrc);
- boolrc = md.isReadOnly(i);
- LOG.debug("Is read only? " + boolrc);
+ boolrc = stmt.isCloseOnCompletion();
+ LOG.debug("is close on completion " + boolrc);
- }
+ boolrc = stmt.isPoolable();
+ LOG.debug("is poolable " + boolrc);
- while (rs.next()) {
- LOG.debug("bo = " + rs.getBoolean(1));
- LOG.debug("bo = " + rs.getBoolean("bo"));
- LOG.debug("ti = " + rs.getByte(2));
- LOG.debug("ti = " + rs.getByte("ti"));
- LOG.debug("db = " + rs.getDouble(3));
- LOG.debug("db = " + rs.getDouble("db"));
- LOG.debug("fl = " + rs.getFloat(4));
- LOG.debug("fl = " + rs.getFloat("fl"));
- LOG.debug("i = " + rs.getInt(5));
- LOG.debug("i = " + rs.getInt("i"));
- LOG.debug("lo = " + rs.getLong(6));
- LOG.debug("lo = " + rs.getLong("lo"));
- LOG.debug("sh = " + rs.getShort(7));
- LOG.debug("sh = " + rs.getShort("sh"));
- LOG.debug("st = " + rs.getString(8));
- LOG.debug("st = " + rs.getString("st"));
- LOG.debug("tm = " + rs.getObject(8));
- LOG.debug("tm = " + rs.getObject("st"));
- LOG.debug("tm was null " + rs.wasNull());
- }
- LOG.debug("bo is column " + rs.findColumn("bo"));
-
- int intrc = rs.getConcurrency();
- LOG.debug("concurrency " + intrc);
-
- intrc = rs.getFetchDirection();
- LOG.debug("fetch direction " + intrc);
-
- intrc = rs.getType();
- LOG.debug("type " + intrc);
-
- Statement copy = rs.getStatement();
+ stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
+ stmt.setFetchSize(500);
+ stmt.setMaxRows(500);
+ }
+ }
- SQLWarning warning = rs.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
- }
- rs.clearWarnings();
+ @Test
+ public void preparedStmtAndResultSet() throws SQLException {
+ final String tableName = "bigtop_jdbc_psars_test_table";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
+ "i int, lo bigint, sh smallint, st varchar(32))");
+ }
+
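+        // The prepared-statement inserts below exercise the basic JDBC setter methods (setBoolean, setByte,
+        // setDouble, setFloat, setInt, setLong, setShort, setString) against the driver.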
+        // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
+        // try to put them in the query.
+ try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
+ " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
+ ps.setBoolean(1, true);
+ ps.setByte(2, (byte) 1);
+ ps.setDouble(3, 3.141592654);
+ ps.setFloat(4, 3.14f);
+ ps.setInt(5, 3);
+ ps.setLong(6, 10L);
+ ps.setShort(7, (short) 20);
+ ps.setString(8, "abc");
+ ps.executeUpdate();
+ }
+
+ try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
+ "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
+ ps.setNull(1, Types.INTEGER);
+ ps.setObject(2, "mary had a little lamb");
+ ps.executeUpdate();
+ ps.setNull(1, Types.INTEGER, null);
+ ps.setString(2, "its fleece was white as snow");
+ ps.clearParameters();
+ ps.setNull(1, Types.INTEGER, null);
+ ps.setString(2, "its fleece was white as snow");
+ ps.execute();
+
+ }
+
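+        // Read the rows back, walking ResultSetMetaData for every column and logging what the driver reports.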
+ try (Statement stmt = conn.createStatement()) {
+
+ ResultSet rs = stmt.executeQuery("select * from " + tableName);
+
+ ResultSetMetaData md = rs.getMetaData();
+
+ int colCnt = md.getColumnCount();
+ LOG.debug("Column count is " + colCnt);
+
+ for (int i = 1; i <= colCnt; i++) {
+ LOG.debug("Looking at column " + i);
+ String strrc = md.getColumnClassName(i);
+ LOG.debug("Column class name is " + strrc);
+
+ int intrc = md.getColumnDisplaySize(i);
+ LOG.debug("Column display size is " + intrc);
+
+ strrc = md.getColumnLabel(i);
+ LOG.debug("Column label is " + strrc);
+
+ strrc = md.getColumnName(i);
+ LOG.debug("Column name is " + strrc);
+
+ intrc = md.getColumnType(i);
+ LOG.debug("Column type is " + intrc);
+
+ strrc = md.getColumnTypeName(i);
+ LOG.debug("Column type name is " + strrc);
+
+ intrc = md.getPrecision(i);
+ LOG.debug("Precision is " + intrc);
+
+ intrc = md.getScale(i);
+ LOG.debug("Scale is " + intrc);
+
+ boolean boolrc = md.isAutoIncrement(i);
+ LOG.debug("Is auto increment? " + boolrc);
+
+ boolrc = md.isCaseSensitive(i);
+ LOG.debug("Is case sensitive? " + boolrc);
+
+ boolrc = md.isCurrency(i);
+ LOG.debug("Is currency? " + boolrc);
+
+ intrc = md.getScale(i);
+ LOG.debug("Scale is " + intrc);
+
+ intrc = md.isNullable(i);
+ LOG.debug("Is nullable? " + intrc);
+
+ boolrc = md.isReadOnly(i);
+ LOG.debug("Is read only? " + boolrc);
+
+ }
+
+ while (rs.next()) {
+ LOG.debug("bo = " + rs.getBoolean(1));
+ LOG.debug("bo = " + rs.getBoolean("bo"));
+ LOG.debug("ti = " + rs.getByte(2));
+ LOG.debug("ti = " + rs.getByte("ti"));
+ LOG.debug("db = " + rs.getDouble(3));
+ LOG.debug("db = " + rs.getDouble("db"));
+ LOG.debug("fl = " + rs.getFloat(4));
+ LOG.debug("fl = " + rs.getFloat("fl"));
+ LOG.debug("i = " + rs.getInt(5));
+ LOG.debug("i = " + rs.getInt("i"));
+ LOG.debug("lo = " + rs.getLong(6));
+ LOG.debug("lo = " + rs.getLong("lo"));
+ LOG.debug("sh = " + rs.getShort(7));
+ LOG.debug("sh = " + rs.getShort("sh"));
+ LOG.debug("st = " + rs.getString(8));
+ LOG.debug("st = " + rs.getString("st"));
+                LOG.debug("st (as object) = " + rs.getObject(8));
+                LOG.debug("st (as object) = " + rs.getObject("st"));
+                LOG.debug("st was null " + rs.wasNull());
+ }
+ LOG.debug("bo is column " + rs.findColumn("bo"));
+
+ int intrc = rs.getConcurrency();
+ LOG.debug("concurrency " + intrc);
+
+ intrc = rs.getFetchDirection();
+ LOG.debug("fetch direction " + intrc);
+
+ intrc = rs.getType();
+ LOG.debug("type " + intrc);
+
+ Statement copy = rs.getStatement();
+
+ SQLWarning warning = rs.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
+ rs.clearWarnings();
+ }
}
- }
}
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
index 06af1da2..993dad0a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
@@ -27,306 +27,306 @@ import java.sql.Statement;
// This does not test every option that Hive supports, but does try to touch the major
// options, especially anything unique to Hive. See each test for areas tested and not tested.
public class TestSql extends JdbcConnector {
- private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
+ private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
- @Test
- public void db() throws SQLException {
- final String db1 = "bigtop_sql_db1";
- final String db2 = "bigtop_sql_db2";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop database if exists " + db1 + " cascade");
+ @Test
+ public void db() throws SQLException {
+ final String db1 = "bigtop_sql_db1";
+ final String db2 = "bigtop_sql_db2";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop database if exists " + db1 + " cascade");
- // Simple create database
- stmt.execute("create database " + db1);
- stmt.execute("drop database " + db1);
+ // Simple create database
+ stmt.execute("create database " + db1);
+ stmt.execute("drop database " + db1);
- stmt.execute("drop schema if exists " + db2 + " cascade");
+ stmt.execute("drop schema if exists " + db2 + " cascade");
- String location = getProperty(LOCATION, "a writable directory in HDFS");
+ String location = getProperty(LOCATION, "a writable directory in HDFS");
- // All the bells and whistles
- stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
- "' with dbproperties ('a' = 'b')");
+ // All the bells and whistles
+ stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
+ "' with dbproperties ('a' = 'b')");
- stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
+ stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
- stmt.execute("drop database " + db2 + " restrict");
+ stmt.execute("drop database " + db2 + " restrict");
+ }
}
- }
-
- @Test
- public void table() throws SQLException {
- final String table1 = "bigtop_sql_table1";
- final String table2 = "bigtop_sql_table2";
- final String table3 = "bigtop_sql_table3";
- final String table4 = "bigtop_sql_table4";
- final String table5 = "bigtop_sql_table5";
-
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + table1);
- stmt.execute("drop table if exists " + table2);
- stmt.execute("drop table if exists " + table3);
- stmt.execute("drop table if exists " + table4);
- stmt.execute("drop table if exists " + table5);
-
- String location = getProperty(LOCATION, "a writable directory in HDFS");
- stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
- location + "'");
-
- // With a little bit of everything, except partitions, we'll do those below
- stmt.execute("create table if not exists " + table2 +
- "(c1 tinyint," +
- " c2 smallint," +
- " c3 int comment 'a column comment'," +
- " c4 bigint," +
- " c5 float," +
- " c6 double," +
- " c7 decimal," +
- " c8 decimal(12)," +
- " c9 decimal(8,2)," +
- " c10 timestamp," +
- " c11 date," +
- " c12 string," +
- " c13 varchar(120)," +
- " c14 char(10)," +
- " c15 boolean," +
- " c16 binary," +
- " c17 array<string>," +
- " c18 map <string, string>," +
- " c19 struct<s1:int, s2:bigint>," +
- " c20 uniontype<int, string>) " +
- "comment 'table comment'" +
- "clustered by (c1) sorted by (c2) into 10 buckets " +
- "stored as orc " +
- "tblproperties ('a' = 'b')");
-
- // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler
-
- stmt.execute("create temporary table " + table3 + " like " + table2);
-
- stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
-
- stmt.execute("create table " + table4 + " as select a, b from " + table1);
-
- stmt.execute("truncate table " + table4);
-
- stmt.execute("alter table " + table4 + " rename to " + table5);
- stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
-
- // Not testing alter of clustered or sorted by, because that's suicidal
- // Not testing alter of skewed or serde properties since we didn't test it for create
- // above.
-
- stmt.execute("drop table " + table1 + " purge");
- stmt.execute("drop table " + table2);
- stmt.execute("drop table " + table3);
- stmt.execute("drop table " + table5);
+
+ @Test
+ public void table() throws SQLException {
+ final String table1 = "bigtop_sql_table1";
+ final String table2 = "bigtop_sql_table2";
+ final String table3 = "bigtop_sql_table3";
+ final String table4 = "bigtop_sql_table4";
+ final String table5 = "bigtop_sql_table5";
+
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + table1);
+ stmt.execute("drop table if exists " + table2);
+ stmt.execute("drop table if exists " + table3);
+ stmt.execute("drop table if exists " + table4);
+ stmt.execute("drop table if exists " + table5);
+
+ String location = getProperty(LOCATION, "a writable directory in HDFS");
+ stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
+ location + "'");
+
+            // With a little bit of everything except partitions; those are tested below
+ stmt.execute("create table if not exists " + table2 +
+ "(c1 tinyint," +
+ " c2 smallint," +
+ " c3 int comment 'a column comment'," +
+ " c4 bigint," +
+ " c5 float," +
+ " c6 double," +
+ " c7 decimal," +
+ " c8 decimal(12)," +
+ " c9 decimal(8,2)," +
+ " c10 timestamp," +
+ " c11 date," +
+ " c12 string," +
+ " c13 varchar(120)," +
+ " c14 char(10)," +
+ " c15 boolean," +
+ " c16 binary," +
+ " c17 array<string>," +
+ " c18 map <string, string>," +
+ " c19 struct<s1:int, s2:bigint>," +
+ " c20 uniontype<int, string>) " +
+ "comment 'table comment'" +
+ "clustered by (c1) sorted by (c2) into 10 buckets " +
+ "stored as orc " +
+ "tblproperties ('a' = 'b')");
+
+            // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler)
+
+ stmt.execute("create temporary table " + table3 + " like " + table2);
+
+ stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
+
+ stmt.execute("create table " + table4 + " as select a, b from " + table1);
+
+ stmt.execute("truncate table " + table4);
+
+ stmt.execute("alter table " + table4 + " rename to " + table5);
+ stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
+
+ // Not testing alter of clustered or sorted by, because that's suicidal
+ // Not testing alter of skewed or serde properties since we didn't test it for create
+ // above.
+
+ stmt.execute("drop table " + table1 + " purge");
+ stmt.execute("drop table " + table2);
+ stmt.execute("drop table " + table3);
+ stmt.execute("drop table " + table5);
+ }
}
- }
- @Test
- public void partitionedTable() throws SQLException {
- final String table1 = "bigtop_sql_ptable1";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + table1);
+ @Test
+ public void partitionedTable() throws SQLException {
+ final String table1 = "bigtop_sql_ptable1";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + table1);
- stmt.execute("create table " + table1 +
- "(c1 int," +
- " c2 varchar(32))" +
- "partitioned by (p1 string comment 'a partition column')" +
- "stored as orc");
+ stmt.execute("create table " + table1 +
+ "(c1 int," +
+ " c2 varchar(32))" +
+ "partitioned by (p1 string comment 'a partition column')" +
+ "stored as orc");
- stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
- stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
- stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
- stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
- stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
- stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
+ stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
+ stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
+ stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
+ stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
+ stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
+ stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
- stmt.execute("alter table " + table1 + " add columns (c3 float)");
- stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
+ stmt.execute("alter table " + table1 + " add columns (c3 float)");
+ stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
- // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
- // set location, enable/disable no_drop/offline, compact (because not everyone may have
- // ACID on), change column
+ // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
+ // set location, enable/disable no_drop/offline, compact (because not everyone may have
+ // ACID on), change column
- stmt.execute("drop table " + table1);
+ stmt.execute("drop table " + table1);
+ }
}
- }
-
- @Test
- public void view() throws SQLException {
- final String table1 = "bigtop_sql_vtable1";
- final String view1 = "bigtop_sql_view1";
- final String view2 = "bigtop_sql_view2";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + table1);
- stmt.execute("drop view if exists " + view1);
- stmt.execute("drop view if exists " + view2);
- stmt.execute("create table " + table1 + "(a int, b varchar(32))");
- stmt.execute("create view " + view1 + " as select a from " + table1);
-
- stmt.execute("create view if not exists " + view2 +
- " comment 'a view comment' " +
- "tblproperties ('a' = 'b') " +
- "as select b from " + table1);
-
- stmt.execute("alter view " + view1 + " as select a, b from " + table1);
- stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
-
- stmt.execute("drop view " + view1);
- stmt.execute("drop view " + view2);
+
+ @Test
+ public void view() throws SQLException {
+ final String table1 = "bigtop_sql_vtable1";
+ final String view1 = "bigtop_sql_view1";
+ final String view2 = "bigtop_sql_view2";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + table1);
+ stmt.execute("drop view if exists " + view1);
+ stmt.execute("drop view if exists " + view2);
+ stmt.execute("create table " + table1 + "(a int, b varchar(32))");
+ stmt.execute("create view " + view1 + " as select a from " + table1);
+
+ stmt.execute("create view if not exists " + view2 +
+ " comment 'a view comment' " +
+ "tblproperties ('a' = 'b') " +
+ "as select b from " + table1);
+
+ stmt.execute("alter view " + view1 + " as select a, b from " + table1);
+ stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
+
+ stmt.execute("drop view " + view1);
+ stmt.execute("drop view " + view2);
+ }
}
- }
- // Not testing indices because they are currently useless in Hive
- // Not testing macros because as far as I know no one uses them
+ // Not testing indices because they are currently useless in Hive
+ // Not testing macros because as far as I know no one uses them
- @Test
- public void function() throws SQLException {
- final String func1 = "bigtop_sql_func1";
- final String func2 = "bigtop_sql_func2";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("create temporary function " + func1 +
- " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
- stmt.execute("drop temporary function " + func1);
+ @Test
+ public void function() throws SQLException {
+ final String func1 = "bigtop_sql_func1";
+ final String func2 = "bigtop_sql_func2";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("create temporary function " + func1 +
+ " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+ stmt.execute("drop temporary function " + func1);
- stmt.execute("drop function if exists " + func2);
+ stmt.execute("drop function if exists " + func2);
- stmt.execute("create function " + func2 +
- " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
- stmt.execute("drop function " + func2);
+ stmt.execute("create function " + func2 +
+ " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+ stmt.execute("drop function " + func2);
+ }
}
- }
-
- // Not testing grant/revoke/roles as different vendors use different security solutions
- // and hence different things will work here.
-
- // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
- // values and select), and multi-insert. Load is not tested as there's no guarantee that the
- // test machine has access to HDFS and thus the ability to upload a file.
- @Test
- public void insert() throws SQLException {
- final String table1 = "bigtop_insert_table1";
- final String table2 = "bigtop_insert_table2";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + table1);
- stmt.execute("create table " + table1 +
- "(c1 tinyint," +
- " c2 smallint," +
- " c3 int," +
- " c4 bigint," +
- " c5 float," +
- " c6 double," +
- " c7 decimal(8,2)," +
- " c8 varchar(120)," +
- " c9 char(10)," +
- " c10 boolean)" +
- " partitioned by (p1 string)");
-
- // insert with partition
- stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
- "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
- "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
- stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
-
- // dynamic partition
- stmt.execute("explain insert into " + table1 + " partition (p1) values " +
- "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
- "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
-
- stmt.execute("drop table if exists " + table2);
-
- stmt.execute("create table " + table2 +
- "(c1 tinyint," +
- " c2 smallint," +
- " c3 int," +
- " c4 bigint," +
- " c5 float," +
- " c6 double," +
- " c7 decimal(8,2)," +
- " c8 varchar(120)," +
- " c9 char(10)," +
- " c10 boolean)");
-
- stmt.execute("explain insert into " + table2 + " values " +
- "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
- "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
- stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
- "c7, c8, c9, c10 from " + table1);
-
- // multi-insert
- stmt.execute("from " + table1 +
- " insert into table " + table1 + " partition (p1 = 'c') " +
- " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
- " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
+
+ // Not testing grant/revoke/roles as different vendors use different security solutions
+ // and hence different things will work here.
+
+ // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
+ // values and select), and multi-insert. Load is not tested as there's no guarantee that the
+ // test machine has access to HDFS and thus the ability to upload a file.
+ @Test
+ public void insert() throws SQLException {
+ final String table1 = "bigtop_insert_table1";
+ final String table2 = "bigtop_insert_table2";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + table1);
+ stmt.execute("create table " + table1 +
+ "(c1 tinyint," +
+ " c2 smallint," +
+ " c3 int," +
+ " c4 bigint," +
+ " c5 float," +
+ " c6 double," +
+ " c7 decimal(8,2)," +
+ " c8 varchar(120)," +
+ " c9 char(10)," +
+ " c10 boolean)" +
+ " partitioned by (p1 string)");
+
+ // insert with partition
+ stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
+ "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+ "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+ stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
+
+ // dynamic partition
+ stmt.execute("explain insert into " + table1 + " partition (p1) values " +
+ "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
+ "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
+
+ stmt.execute("drop table if exists " + table2);
+
+ stmt.execute("create table " + table2 +
+ "(c1 tinyint," +
+ " c2 smallint," +
+ " c3 int," +
+ " c4 bigint," +
+ " c5 float," +
+ " c6 double," +
+ " c7 decimal(8,2)," +
+ " c8 varchar(120)," +
+ " c9 char(10)," +
+ " c10 boolean)");
+
+ stmt.execute("explain insert into " + table2 + " values " +
+ "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+ "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+ stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
+ "c7, c8, c9, c10 from " + table1);
+
+ // multi-insert
+ stmt.execute("from " + table1 +
+ " insert into table " + table1 + " partition (p1 = 'c') " +
+ " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
+ " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
+ }
}
- }
-
- // This tests CTEs
- @Test
- public void cte() throws SQLException {
- final String table1 = "bigtop_cte_table1";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + table1);
- stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
- stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
- " select c1 from cte1");
+
+ // This tests CTEs
+ @Test
+ public void cte() throws SQLException {
+ final String table1 = "bigtop_cte_table1";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + table1);
+ stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
+ stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
+ " select c1 from cte1");
+ }
}
- }
- // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
- // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
- // subqueries, and over.
+ // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
+ // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
+ // subqueries, and over.
- @Test
- public void select() throws SQLException {
- final String[] tables = {"bigtop_select_table1", "bigtop_select_table2"};
- try (Statement stmt = conn.createStatement()) {
- for (int i = 0; i < tables.length; i++) {
- stmt.execute("drop table if exists " + tables[i]);
- stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
- }
+ @Test
+ public void select() throws SQLException {
+ final String[] tables = {"bigtop_select_table1", "bigtop_select_table2"};
+ try (Statement stmt = conn.createStatement()) {
+ for (int i = 0; i < tables.length; i++) {
+ stmt.execute("drop table if exists " + tables[i]);
+ stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
+ }
- // single table queries tested above in several places
+ // single table queries tested above in several places
- stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
- "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
- "group by a.c2 " +
- "order by a.c2 asc " +
- "limit 10");
+ stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
+ "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
+ "group by a.c2 " +
+ "order by a.c2 asc " +
+ "limit 10");
- stmt.execute("explain select distinct a.c2 " +
- "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
- "order by a.c2 desc ");
+ stmt.execute("explain select distinct a.c2 " +
+ "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+ "order by a.c2 desc ");
- stmt.execute("explain select a.c2, SUM(a.c1) " +
- "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
- "group by a.c2 " +
- "having SUM(b.c1) > 0 " +
- "order by a.c2 ");
+ stmt.execute("explain select a.c2, SUM(a.c1) " +
+ "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+ "group by a.c2 " +
+ "having SUM(b.c1) > 0 " +
+ "order by a.c2 ");
- stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
- "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
+ stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
+ "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
- stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
+ stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
- stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
- stmt.execute("explain select * from " + tables[0] + " cluster by c1");
+ stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
+ stmt.execute("explain select * from " + tables[0] + " cluster by c1");
- stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
- stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
- ")");
+ stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
+ stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
+ ")");
- }
+ }
- }
+ }
- // Update and delete are not tested because not everyone configures their system to run
- // with ACID.
+ // Update and delete are not tested because not everyone configures their system to run
+ // with ACID.
}
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index f54b7e5a..8139eff0 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -45,207 +45,207 @@ import java.util.Random;
public class TestThrift {
- private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
+ private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
+
+ private static IMetaStoreClient client = null;
+ private static HiveConf conf;
+
+ private Random rand;
+
+ @BeforeClass
+ public static void connect() throws MetaException {
+ if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
+ String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
+ conf = new HiveConf();
+ conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
+ LOG.info("Set to test against metastore at " + url);
+ client = new HiveMetaStoreClient(conf);
+ }
+ }
+
+ @Before
+ public void checkIfActive() {
+ Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
+ rand = new Random();
+ }
+
+ @Test
+ public void db() throws TException {
+ final String dbName = "bigtop_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
+
+ Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
+ client.createDatabase(db);
+ db = client.getDatabase(dbName);
+ Assert.assertNotNull(db);
+ db = new Database(db);
+ db.getParameters().put("a", "b");
+ client.alterDatabase(dbName, db);
+ List<String> alldbs = client.getDatabases("bigtop_*");
+ Assert.assertNotNull(alldbs);
+ Assert.assertTrue(alldbs.size() > 0);
+ alldbs = client.getAllDatabases();
+ Assert.assertNotNull(alldbs);
+ Assert.assertTrue(alldbs.size() > 0);
+ client.dropDatabase(dbName, true, true);
+ }
+
+ // Not testing types calls, as they aren't used AFAIK
+
+ @Test
+ public void nonPartitionedTable() throws TException {
+ final String tableName = "bigtop_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+ // I don't test every operation related to tables, but only those that are frequently used.
+ SerDeInfo serde = new SerDeInfo("default_serde",
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+ FieldSchema fs = new FieldSchema("a", "int", "no comment");
+ StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+ new HashMap<String, String>());
+ Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+
+ table = client.getTable("default", tableName);
+ Assert.assertNotNull(table);
+
+ List<Table> tables =
+ client.getTableObjectsByName("default", Collections.singletonList(tableName));
+ Assert.assertNotNull(tables);
+ Assert.assertEquals(1, tables.size());
+
+ List<String> tableNames = client.getTables("default", "bigtop_*");
+ Assert.assertNotNull(tableNames);
+ Assert.assertTrue(tableNames.size() >= 1);
+
+ tableNames = client.getAllTables("default");
+ Assert.assertNotNull(tableNames);
+ Assert.assertTrue(tableNames.size() >= 1);
- private static IMetaStoreClient client = null;
- private static HiveConf conf;
+ List<FieldSchema> cols = client.getFields("default", tableName);
+ Assert.assertNotNull(cols);
+ Assert.assertEquals(1, cols.size());
- private Random rand;
+ cols = client.getSchema("default", tableName);
+ Assert.assertNotNull(cols);
+ Assert.assertEquals(1, cols.size());
- @BeforeClass
- public static void connect() throws MetaException {
- if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
- String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
- conf = new HiveConf();
- conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
- LOG.info("Set to test against metastore at " + url);
- client = new HiveMetaStoreClient(conf);
+ table = new Table(table);
+ table.getParameters().put("a", "b");
+ client.alter_table("default", tableName, table, false);
+
+ table.getParameters().put("c", "d");
+ client.alter_table("default", tableName, table);
+
+ client.dropTable("default", tableName, true, false);
+ }
+
+ @Test
+ public void partitionedTable() throws TException {
+ final String tableName = "bigtop_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+ // I don't test every operation related to tables, but only those that are frequently used.
+ SerDeInfo serde = new SerDeInfo("default_serde",
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+ FieldSchema fs = new FieldSchema("a", "int", "no comment");
+ StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+ new HashMap<String, String>());
+ FieldSchema pk = new FieldSchema("pk", "string", "");
+ Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+
+ sd = new StorageDescriptor(Collections.singletonList(fs), null,
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+ new HashMap<String, String>());
+ Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
+ 0, sd, new HashMap<String, String>());
+ client.add_partition(partition);
+
+ List<Partition> partitions = new ArrayList<>(2);
+ sd = new StorageDescriptor(Collections.singletonList(fs), null,
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+ new HashMap<String, String>());
+ partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
+ 0, sd, new HashMap<String, String>()));
+ sd = new StorageDescriptor(Collections.singletonList(fs), null,
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+ new HashMap<String, String>());
+ partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
+ 0, sd, new HashMap<String, String>()));
+ client.add_partitions(partitions);
+
+ List<Partition> parts = client.listPartitions("default", tableName, (short) -1);
+ Assert.assertNotNull(parts);
+ Assert.assertEquals(3, parts.size());
+
+ parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
+ (short) -1);
+ Assert.assertNotNull(parts);
+ Assert.assertEquals(1, parts.size());
+
+ parts = client.listPartitionsWithAuthInfo("default", tableName, (short) -1, "me",
+ Collections.<String>emptyList());
+ Assert.assertNotNull(parts);
+ Assert.assertEquals(3, parts.size());
+
+ List<String> partNames = client.listPartitionNames("default", tableName, (short) -1);
+ Assert.assertNotNull(partNames);
+ Assert.assertEquals(3, partNames.size());
+
+ parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short) -1);
+ Assert.assertNotNull(parts);
+ Assert.assertEquals(1, parts.size());
+
+ parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
+ Assert.assertNotNull(parts);
+ Assert.assertEquals(1, parts.size());
+
+ partition = client.getPartition("default", tableName, Collections.singletonList("x"));
+ Assert.assertNotNull(partition);
+
+ partition = client.getPartition("default", tableName, "pk=x");
+ Assert.assertNotNull(partition);
+
+ partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
+ "me", Collections.<String>emptyList());
+ Assert.assertNotNull(partition);
+
+ partition = new Partition(partition);
+ partition.getParameters().put("a", "b");
+ client.alter_partition("default", tableName, partition);
+
+ for (Partition p : parts) p.getParameters().put("c", "d");
+ client.alter_partitions("default", tableName, parts);
+
+ // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
+        // from the parser. The odds that anyone other than the Hive parser would call this method seem
+        // low, since you'd have to exactly match the serialization of the Hive parser.
+
+ // Not testing partition marking events, not used by anyone but Hive replication AFAIK
+
+ client.dropPartition("default", tableName, "pk=x", true);
+ client.dropPartition("default", tableName, Collections.singletonList("y"), true);
}
- }
-
- @Before
- public void checkIfActive() {
- Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
- rand = new Random();
- }
-
- @Test
- public void db() throws TException {
- final String dbName = "bigtop_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
-
- Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
- client.createDatabase(db);
- db = client.getDatabase(dbName);
- Assert.assertNotNull(db);
- db = new Database(db);
- db.getParameters().put("a", "b");
- client.alterDatabase(dbName, db);
- List<String> alldbs = client.getDatabases("bigtop_*");
- Assert.assertNotNull(alldbs);
- Assert.assertTrue(alldbs.size() > 0);
- alldbs = client.getAllDatabases();
- Assert.assertNotNull(alldbs);
- Assert.assertTrue(alldbs.size() > 0);
- client.dropDatabase(dbName, true, true);
- }
-
- // Not testing types calls, as they aren't used AFAIK
-
- @Test
- public void nonPartitionedTable() throws TException {
- final String tableName = "bigtop_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
-
- // I don't test every operation related to tables, but only those that are frequently used.
- SerDeInfo serde = new SerDeInfo("default_serde",
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
- FieldSchema fs = new FieldSchema("a", "int", "no comment");
- StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
- new HashMap<String, String>());
- Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
-
- table = client.getTable("default", tableName);
- Assert.assertNotNull(table);
-
- List<Table> tables =
- client.getTableObjectsByName("default", Collections.singletonList(tableName));
- Assert.assertNotNull(tables);
- Assert.assertEquals(1, tables.size());
-
- List<String> tableNames = client.getTables("default", "bigtop_*");
- Assert.assertNotNull(tableNames);
- Assert.assertTrue(tableNames.size() >= 1);
-
- tableNames = client.getAllTables("default");
- Assert.assertNotNull(tableNames);
- Assert.assertTrue(tableNames.size() >= 1);
-
- List<FieldSchema> cols = client.getFields("default", tableName);
- Assert.assertNotNull(cols);
- Assert.assertEquals(1, cols.size());
-
- cols = client.getSchema("default", tableName);
- Assert.assertNotNull(cols);
- Assert.assertEquals(1, cols.size());
-
- table = new Table(table);
- table.getParameters().put("a", "b");
- client.alter_table("default", tableName, table, false);
-
- table.getParameters().put("c", "d");
- client.alter_table("default", tableName, table);
-
- client.dropTable("default", tableName, true, false);
- }
-
- @Test
- public void partitionedTable() throws TException {
- final String tableName = "bigtop_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
-
- // I don't test every operation related to tables, but only those that are frequently used.
- SerDeInfo serde = new SerDeInfo("default_serde",
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
- FieldSchema fs = new FieldSchema("a", "int", "no comment");
- StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
- new HashMap<String, String>());
- FieldSchema pk = new FieldSchema("pk", "string", "");
- Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
-
- sd = new StorageDescriptor(Collections.singletonList(fs), null,
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
- new HashMap<String, String>());
- Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
- 0, sd, new HashMap<String, String>());
- client.add_partition(partition);
-
- List<Partition> partitions = new ArrayList<>(2);
- sd = new StorageDescriptor(Collections.singletonList(fs), null,
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
- new HashMap<String, String>());
- partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
- 0, sd, new HashMap<String, String>()));
- sd = new StorageDescriptor(Collections.singletonList(fs), null,
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
- new HashMap<String, String>());
- partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
- 0, sd, new HashMap<String, String>()));
- client.add_partitions(partitions);
-
- List<Partition> parts = client.listPartitions("default", tableName, (short)-1);
- Assert.assertNotNull(parts);
- Assert.assertEquals(3, parts.size());
-
- parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
- (short)-1);
- Assert.assertNotNull(parts);
- Assert.assertEquals(1, parts.size());
-
- parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me",
- Collections.<String>emptyList());
- Assert.assertNotNull(parts);
- Assert.assertEquals(3, parts.size());
-
- List<String> partNames = client.listPartitionNames("default", tableName, (short)-1);
- Assert.assertNotNull(partNames);
- Assert.assertEquals(3, partNames.size());
-
- parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1);
- Assert.assertNotNull(parts);
- Assert.assertEquals(1, parts.size());
-
- parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
- Assert.assertNotNull(parts);
- Assert.assertEquals(1, parts.size());
-
- partition = client.getPartition("default", tableName, Collections.singletonList("x"));
- Assert.assertNotNull(partition);
-
- partition = client.getPartition("default", tableName, "pk=x");
- Assert.assertNotNull(partition);
-
- partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
- "me", Collections.<String>emptyList());
- Assert.assertNotNull(partition);
-
- partition = new Partition(partition);
- partition.getParameters().put("a", "b");
- client.alter_partition("default", tableName, partition);
-
- for (Partition p : parts) p.getParameters().put("c", "d");
- client.alter_partitions("default", tableName, parts);
-
- // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
- // from the parser. The odds that anyone other than the Hive parser would call this method seem
- // low, since you'd have to exactly match the serialization of the Hive parser.
-
- // Not testing partition marking events, not used by anyone but Hive replication AFAIK
-
- client.dropPartition("default", tableName, "pk=x", true);
- client.dropPartition("default", tableName, Collections.singletonList("y"), true);
- }
-
- // Not testing index calls, as no one uses indices
-
-
- // Not sure if anyone uses stats calls or not. Other query engines might. Ignoring for now.
-
- // Not sure if anyone else uses functions, though I'm guessing not, as without Hive classes they
- // won't be runnable.
-
- // Not testing authorization calls as AFAIK no one else uses Hive security
-
- // Not testing transaction/locking calls, as those are used only by Hive.
-
- // Not testing notification logging calls, as those are used only by Hive replication.
+
+ // Not testing index calls, as no one uses indices
+
+
+ // Not sure if anyone uses stats calls or not. Other query engines might. Ignoring for now.
+
+ // Not sure if anyone else uses functions, though I'm guessing not, as without Hive classes they
+ // won't be runnable.
+
+ // Not testing authorization calls as AFAIK no one else uses Hive security
+
+ // Not testing transaction/locking calls, as those are used only by Hive.
+
+ // Not testing notification logging calls, as those are used only by Hive replication.
}
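
For context when reading the partition calls exercised above, here is a minimal, self-contained sketch of how a Thrift metastore client of the kind this test drives is typically constructed and used. The metastore URI, the table name, and the standalone main wrapper are illustrative assumptions only; in TestThrift.java the client and conf fields are initialized elsewhere in the class.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;

import java.util.Collections;
import java.util.List;

public class MetastoreClientSketch {
    public static void main(String[] args) throws TException {
        // Illustrative only: point the client at a running metastore (URI is an assumption).
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");

        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            // (short) -1 asks for all partitions, matching the listPartitions calls in the test.
            List<Partition> all = client.listPartitions("default", "some_table", (short) -1);
            System.out.println("partitions: " + all.size());

            // A partition can be addressed either by its values or by its "key=value" name string,
            // which is why the test calls getPartition both ways.
            Partition byValues = client.getPartition("default", "some_table",
                    Collections.singletonList("x"));                   // values, in partition-key order
            Partition byName = client.getPartition("default", "some_table", "pk=x"); // name string
            System.out.println(byValues.getValues() + " / " + byName.getValues());
        } finally {
            client.close();
        }
    }
}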