From 5e342c45364ec97f5e3530769a1cc8bdbcf69bb0 Mon Sep 17 00:00:00 2001 From: Roman Shaposhnik Date: Wed, 22 Mar 2017 08:51:22 -0700 Subject: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests --- bigtop-tests/smoke-tests/odpi-runtime/README.md | 48 ++ bigtop-tests/smoke-tests/odpi-runtime/build.gradle | 63 +++ .../org/odpi/specs/runtime/hadoop/ApiExaminer.java | 485 ++++++++++++++++++ .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 ++++++ .../src/main/resources/api-examiner-prep.sh | 64 +++ .../org/odpi/specs/runtime/TestSpecsRuntime.groovy | 275 +++++++++++ .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 +++++ .../org/odpi/specs/runtime/hive/JdbcConnector.java | 79 +++ .../org/odpi/specs/runtime/hive/TestBeeline.java | 201 ++++++++ .../java/org/odpi/specs/runtime/hive/TestCLI.java | 213 ++++++++ .../org/odpi/specs/runtime/hive/TestHCatalog.java | 158 ++++++ .../java/org/odpi/specs/runtime/hive/TestJdbc.java | 545 +++++++++++++++++++++ .../java/org/odpi/specs/runtime/hive/TestSql.java | 337 +++++++++++++ .../org/odpi/specs/runtime/hive/TestThrift.java | 251 ++++++++++ .../src/test/python/find-public-apis.py | 80 +++ .../resources/hadoop-common-2.7.3-api-report.json | 1 + .../src/test/resources/hadoop-common-bin.list | 2 + .../src/test/resources/hadoop-common-jar.list | 60 +++ .../src/test/resources/hadoop-common.list | 230 +++++++++ .../resources/hadoop-hdfs-2.7.3-api-report.json | 1 + .../src/test/resources/hadoop-hdfs-bin.list | 1 + .../src/test/resources/hadoop-hdfs-jar.list | 25 + .../src/test/resources/hadoop-hdfs.list | 79 +++ .../src/test/resources/hadoop-mapreduce-bin.list | 1 + ...oop-mapreduce-client-core-2.7.3-api-report.json | 1 + .../src/test/resources/hadoop-mapreduce-jar.list | 22 + .../src/test/resources/hadoop-mapreduce.list | 123 +++++ .../src/test/resources/hadoop-subprojs.list | 4 + .../hadoop-yarn-api-2.7.3-api-report.json | 1 + .../src/test/resources/hadoop-yarn-bin.list | 3 + 
.../hadoop-yarn-client-2.7.3-api-report.json | 1 + .../hadoop-yarn-common-2.7.3-api-report.json | 1 + .../src/test/resources/hadoop-yarn-jar.list | 38 ++ .../src/test/resources/hadoop-yarn.list | 74 +++ .../src/test/resources/testRuntimeSpecConf.groovy | 430 ++++++++++++++++ bigtop-tests/spec-tests/README.md | 48 -- bigtop-tests/spec-tests/build.gradle | 63 --- bigtop-tests/spec-tests/runtime/build.gradle | 63 --- .../org/odpi/specs/runtime/hadoop/ApiExaminer.java | 485 ------------------ .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 ------ .../src/main/resources/api-examiner-prep.sh | 64 --- .../org/odpi/specs/runtime/TestSpecsRuntime.groovy | 275 ----------- .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 ----- .../org/odpi/specs/runtime/hive/JdbcConnector.java | 79 --- .../org/odpi/specs/runtime/hive/TestBeeline.java | 201 -------- .../java/org/odpi/specs/runtime/hive/TestCLI.java | 213 -------- .../org/odpi/specs/runtime/hive/TestHCatalog.java | 158 ------ .../java/org/odpi/specs/runtime/hive/TestJdbc.java | 545 --------------------- .../java/org/odpi/specs/runtime/hive/TestSql.java | 337 ------------- .../org/odpi/specs/runtime/hive/TestThrift.java | 251 ---------- .../runtime/src/test/python/find-public-apis.py | 80 --- .../resources/hadoop-common-2.7.3-api-report.json | 1 - .../src/test/resources/hadoop-common-bin.list | 2 - .../src/test/resources/hadoop-common-jar.list | 60 --- .../runtime/src/test/resources/hadoop-common.list | 230 --------- .../resources/hadoop-hdfs-2.7.3-api-report.json | 1 - .../src/test/resources/hadoop-hdfs-bin.list | 1 - .../src/test/resources/hadoop-hdfs-jar.list | 25 - .../runtime/src/test/resources/hadoop-hdfs.list | 79 --- .../src/test/resources/hadoop-mapreduce-bin.list | 1 - ...oop-mapreduce-client-core-2.7.3-api-report.json | 1 - .../src/test/resources/hadoop-mapreduce-jar.list | 22 - .../src/test/resources/hadoop-mapreduce.list | 123 ----- .../src/test/resources/hadoop-subprojs.list | 4 - 
.../hadoop-yarn-api-2.7.3-api-report.json | 1 - .../src/test/resources/hadoop-yarn-bin.list | 3 - .../hadoop-yarn-client-2.7.3-api-report.json | 1 - .../hadoop-yarn-common-2.7.3-api-report.json | 1 - .../src/test/resources/hadoop-yarn-jar.list | 38 -- .../runtime/src/test/resources/hadoop-yarn.list | 74 --- .../src/test/resources/testRuntimeSpecConf.groovy | 430 ---------------- build.gradle | 3 +- settings.gradle | 5 - 73 files changed, 4157 insertions(+), 4224 deletions(-) create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/README.md create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/build.gradle create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java create mode 100755 bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java create mode 
100755 bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list create mode 100644 
bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list create mode 100644 bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy delete mode 100644 bigtop-tests/spec-tests/README.md delete mode 100644 bigtop-tests/spec-tests/build.gradle delete mode 100644 bigtop-tests/spec-tests/runtime/build.gradle delete mode 100644 bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java delete mode 100755 bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java delete mode 100755 bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list delete mode 100644 
bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list delete mode 100644 bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy diff --git a/bigtop-tests/smoke-tests/odpi-runtime/README.md b/bigtop-tests/smoke-tests/odpi-runtime/README.md new file mode 100644 index 00000000..8fde997f --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/README.md @@ -0,0 +1,48 @@ +Licensed to the Apache 
Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Test suite to validate Hadoop basic specifications +================================================== + +The test suite is intended to be used as a validation tool to make sure that a +Hadoop stack derived from Apache Bigtop is still compliant with it. The +minimalistic way of doing so would be to guarantee compatibility of the +environment, binaries layouts, certain configuration parameters, and so on. + +Validation test suite for the specs is vaguely based on Apache Bigtop iTest and +consists of two essential parts: a configuration file, communicating the +functional commands and expected outcome(s) of it; and the test driver to run +the commands and compare the results. + +Running the tests +================= + +Tests could be executed by running the following command +``` + gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info +``` +======= +consists of two essential parts: a configuration file, communicating the +functional commands and expected outcome(s) of it; and the test driver to run +the commands and compare the results. 
+ +Running the tests +================= + +Tests could be executed by running the following command +``` + gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info +``` + diff --git a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle new file mode 100644 index 00000000..97e36353 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +def junitVersion = '4.11' + +apply plugin: 'java' + +repositories { + maven { + url "http://conjars.org/repo/" + } +} +dependencies { + compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true' + compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3' + compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3' + compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1' + compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1' + compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1' + compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3' + compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3' + compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2' + compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1' + testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2' + compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2' + testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2' + testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2' + testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1' + testCompile "junit:junit:4.11" + if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR) +} + +jar { + from { + (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect { + zipTree(it) + } + } + + exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA' 
+} + +test { + // Change the default location where test data is picked up + systemProperty 'test.resources.dir', "${buildDir}/resources/test/" + systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath + systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ } +} +test.dependsOn jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java new file mode 100644 index 00000000..d95c010d --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java @@ -0,0 +1,485 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hadoop; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Options; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.codehaus.jackson.annotate.JsonIgnore; +import org.codehaus.jackson.map.ObjectMapper; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * A tool that generates API conformance tests for Hadoop libraries + */ +public class ApiExaminer { + + private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName()); + + static private Set unloadableClasses; + + private List errors; + private List warnings; + + static { + unloadableClasses = new HashSet<>(); + unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping"); + unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping"); + unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor"); + unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask"); + + } + + public static void main(String[] args) { + Options options = new Options(); + + 
options.addOption("c", "compare", true, + "Compare against a spec, argument is the json file containing spec"); + options.addOption("h", "help", false, "You're looking at it"); + options.addOption("j", "jar", true, "Jar to examine"); + options.addOption("p", "prepare-spec", true, + "Prepare the spec, argument is the directory to write the spec to"); + + try { + CommandLine cli = new GnuParser().parse(options, args); + + if (cli.hasOption('h')) { + usage(options); + return; + } + + if ((!cli.hasOption('c') && !cli.hasOption('p')) || + (cli.hasOption('c') && cli.hasOption('p'))) { + System.err.println("You must choose either -c or -p"); + usage(options); + return; + } + + if (!cli.hasOption('j')) { + System.err.println("You must specify the jar to prepare or compare"); + usage(options); + return; + } + + String jar = cli.getOptionValue('j'); + ApiExaminer examiner = new ApiExaminer(); + + if (cli.hasOption('c')) { + examiner.compareAgainstStandard(cli.getOptionValue('c'), jar); + } else if (cli.hasOption('p')) { + examiner.prepareExpected(jar, cli.getOptionValue('p')); + } + } catch (Exception e) { + System.err.println("Received exception while processing"); + e.printStackTrace(); + } + } + + private static void usage(Options options) { + HelpFormatter help = new HelpFormatter(); + help.printHelp("api-examiner", options); + + } + + private ApiExaminer() { + } + + private void prepareExpected(String jarFile, String outputDir) throws IOException, + ClassNotFoundException { + JarInfo jarInfo = new JarInfo(jarFile, this); + jarInfo.dumpToFile(new File(outputDir)); + } + + private void compareAgainstStandard(String json, String jarFile) throws IOException, + ClassNotFoundException { + errors = new ArrayList<>(); + warnings = new ArrayList<>(); + JarInfo underTest = new JarInfo(jarFile, this); + JarInfo standard = jarInfoFromFile(new File(json)); + standard.compareAndReport(underTest); + + if (errors.size() > 0) { + System.err.println("Found " + errors.size() + " 
incompatibilities:"); + for (String error : errors) { + System.err.println(error); + } + } + + if (warnings.size() > 0) { + System.err.println("Found " + warnings.size() + " possible issues: "); + for (String warning : warnings) { + System.err.println(warning); + } + } + + + } + + private JarInfo jarInfoFromFile(File inputFile) throws IOException { + ObjectMapper mapper = new ObjectMapper(); + JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class); + jarInfo.patchUpClassBackPointers(this); + return jarInfo; + } + + private static class JarInfo { + String name; + String version; + ApiExaminer container; + Map classes; + + // For use by Jackson + public JarInfo() { + + } + + JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException { + this.container = container; + LOG.info("Processing jar " + jarFile); + File f = new File(jarFile); + Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*"); + Matcher matcher = pattern.matcher(f.getName()); + if (!matcher.matches()) { + String msg = "Unable to determine name and version from " + f.getName(); + LOG.error(msg); + throw new RuntimeException(msg); + } + name = matcher.group(1); + version = matcher.group(2); + classes = new HashMap<>(); + + JarFile jar = new JarFile(jarFile); + Enumeration entries = jar.entries(); + while (entries.hasMoreElements()) { + String name = entries.nextElement().getName(); + if (name.endsWith(".class")) { + name = name.substring(0, name.length() - 6); + name = name.replace('/', '.'); + if (!unloadableClasses.contains(name)) { + LOG.debug("Processing class " + name); + Class clazz = Class.forName(name); + if (clazz.getAnnotation(InterfaceAudience.Public.class) != null && + clazz.getAnnotation(InterfaceStability.Stable.class) != null) { + classes.put(name, new ClassInfo(this, clazz)); + } + } + } + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String 
getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public Map getClasses() { + return classes; + } + + public void setClasses(Map classes) { + this.classes = classes; + } + + void compareAndReport(JarInfo underTest) { + Set underTestClasses = new HashSet<>(underTest.classes.values()); + for (ClassInfo classInfo : classes.values()) { + if (underTestClasses.contains(classInfo)) { + classInfo.compareAndReport(underTest.classes.get(classInfo.name)); + underTestClasses.remove(classInfo); + } else { + container.errors.add(underTest + " does not contain class " + classInfo); + } + } + + if (underTestClasses.size() > 0) { + for (ClassInfo extra : underTestClasses) { + container.warnings.add(underTest + " contains extra class " + extra); + } + } + } + + void dumpToFile(File outputDir) throws IOException { + File output = new File(outputDir, name + "-" + version + "-api-report.json"); + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(output, this); + } + + void patchUpClassBackPointers(ApiExaminer container) { + this.container = container; + for (ClassInfo classInfo : classes.values()) { + classInfo.setJar(this); + classInfo.patchUpBackMethodBackPointers(); + } + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof JarInfo)) return false; + JarInfo that = (JarInfo)other; + return name.equals(that.name) && version.equals(that.version); + } + + @Override + public String toString() { + return name + "-" + version; + } + } + + private static class ClassInfo { + @JsonIgnore JarInfo jar; + String name; + Map methods; + + // For use by Jackson + public ClassInfo() { + + } + + ClassInfo(JarInfo jar, Class clazz) { + this.jar = jar; + this.name = clazz.getName(); + methods = new HashMap<>(); + + for (Method method : clazz.getMethods()) { + if (method.getDeclaringClass().equals(clazz)) { + LOG.debug("Processing method " + method.getName()); + MethodInfo mi = new MethodInfo(this, 
method); + methods.put(mi.toString(), mi); + } + } + } + + public JarInfo getJar() { + return jar; + } + + public void setJar(JarInfo jar) { + this.jar = jar; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Map getMethods() { + return methods; + } + + public void setMethods(Map methods) { + this.methods = methods; + } + + void compareAndReport(ClassInfo underTest) { + // Make a copy so we can remove them as we match them, making it easy to find additional ones + Set underTestMethods = new HashSet<>(underTest.methods.values()); + for (MethodInfo methodInfo : methods.values()) { + if (underTestMethods.contains(methodInfo)) { + methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString())); + underTestMethods.remove(methodInfo); + } else { + jar.container.errors.add(underTest + " does not contain method " + methodInfo); + } + } + + if (underTestMethods.size() > 0) { + for (MethodInfo extra : underTestMethods) { + jar.container.warnings.add(underTest + " contains extra method " + extra); + } + } + } + + void patchUpBackMethodBackPointers() { + for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof ClassInfo)) return false; + ClassInfo that = (ClassInfo)other; + return name.equals(that.name); // Classes can be compared just on names + } + + @Override + public int hashCode() { + return name.hashCode(); + } + + @Override + public String toString() { + return jar + " " + name; + } + } + + private static class MethodInfo { + @JsonIgnore ClassInfo containingClass; + String name; + String returnType; + List args; + Set exceptions; + + // For use by Jackson + public MethodInfo() { + + } + + MethodInfo(ClassInfo containingClass, Method method) { + this.containingClass = containingClass; + this.name = method.getName(); + args = new ArrayList<>(); + for (Class argClass : 
method.getParameterTypes()) { + args.add(argClass.getName()); + } + returnType = method.getReturnType().getName(); + exceptions = new HashSet<>(); + for (Class exception : method.getExceptionTypes()) { + exceptions.add(exception.getName()); + } + } + + public ClassInfo getContainingClass() { + return containingClass; + } + + public void setContainingClass(ClassInfo containingClass) { + this.containingClass = containingClass; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getReturnType() { + return returnType; + } + + public void setReturnType(String returnType) { + this.returnType = returnType; + } + + public List getArgs() { + return args; + } + + public void setArgs(List args) { + this.args = args; + } + + public Set getExceptions() { + return exceptions; + } + + public void setExceptions(Set exceptions) { + this.exceptions = exceptions; + } + + void compareAndReport(MethodInfo underTest) { + // Check to see if they've added or removed exceptions + // Make a copy so I can remove them as I check them off and easily find any that have been + // added. + Set underTestExceptions = new HashSet<>(underTest.exceptions); + for (String exception : exceptions) { + if (underTest.exceptions.contains(exception)) { + underTestExceptions.remove(exception); + } else { + containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " + + underTest.containingClass + "." + name + " removes exception " + exception); + } + } + if (underTestExceptions.size() > 0) { + for (String underTestException : underTest.exceptions) { + containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " + + underTest.containingClass + "." 
+ name + " adds exception " + underTestException); + } + } + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof MethodInfo)) return false; + MethodInfo that = (MethodInfo)other; + + return containingClass.equals(that.containingClass) && name.equals(that.name) && + returnType.equals(that.returnType) && args.equals(that.args); + } + + @Override + public int hashCode() { + return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 + + args.hashCode(); + } + + @Override + public String toString() { + StringBuilder buf = new StringBuilder(returnType) + .append(" ") + .append(name) + .append('('); + boolean first = true; + for (String arg : args) { + if (first) first = false; + else buf.append(", "); + buf.append(arg); + } + buf.append(")"); + if (exceptions.size() > 0) { + buf.append(" throws "); + first = true; + for (String exception : exceptions) { + if (first) first = false; + else buf.append(", "); + buf.append(exception); + } + } + return buf.toString(); + } + } +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java new file mode 100644 index 00000000..4110d5d6 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java @@ -0,0 +1,137 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.WritableComparable; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.util.GenericOptionsParser; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; +import org.apache.hive.hcatalog.data.DefaultHCatRecord; +import org.apache.hive.hcatalog.data.HCatRecord; +import org.apache.hive.hcatalog.data.schema.HCatSchema; +import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils; +import org.apache.hive.hcatalog.mapreduce.HCatInputFormat; +import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat; +import org.apache.hive.hcatalog.mapreduce.OutputJobInfo; + +import java.io.IOException; +import java.net.URI; +import java.util.StringTokenizer; + +public class HCatalogMR extends Configured implements Tool { + private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input"; + private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output"; + + @Override + public int run(String[] args) throws Exception { + String inputTable = null; + String outputTable = null; + String inputSchemaStr = null; + String outputSchemaStr = null; + for(int i = 0; i < args.length; i++){ + if(args[i].equalsIgnoreCase("-it")){ + inputTable = args[i+1]; + }else if(args[i].equalsIgnoreCase("-ot")){ + outputTable = args[i+1]; + }else if(args[i].equalsIgnoreCase("-is")){ + inputSchemaStr = args[i+1]; 
+ }else if(args[i].equalsIgnoreCase("-os")){ + outputSchemaStr = args[i+1]; + } + } + + Configuration conf = getConf(); + args = new GenericOptionsParser(conf, args).getRemainingArgs(); + + conf.set(INPUT_SCHEMA, inputSchemaStr); + conf.set(OUTPUT_SCHEMA, outputSchemaStr); + + Job job = new Job(conf, "odpi_hcat_test"); + HCatInputFormat.setInput(job, "default", inputTable); + + job.setInputFormatClass(HCatInputFormat.class); + job.setJarByClass(HCatalogMR.class); + job.setMapperClass(Map.class); + job.setReducerClass(Reduce.class); + job.setMapOutputKeyClass(Text.class); + job.setMapOutputValueClass(IntWritable.class); + job.setOutputKeyClass(WritableComparable.class); + job.setOutputValueClass(HCatRecord.class); + HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null)); + HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr)); + job.setOutputFormatClass(HCatOutputFormat.class); + + return job.waitForCompletion(true) ? 0 : 1; + + + } + public static class Map extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> { + private final static IntWritable one = new IntWritable(1); + private Text word = new Text(); + private HCatSchema inputSchema = null; + + @Override + protected void map(WritableComparable key, HCatRecord value, Context context) + throws IOException, InterruptedException { + if (inputSchema == null) { + inputSchema = + HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA)); + } + String line = value.getString("line", inputSchema); + StringTokenizer tokenizer = new StringTokenizer(line); + while (tokenizer.hasMoreTokens()) { + word.set(tokenizer.nextToken()); + context.write(word, one); + } + } + } + + public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> { + private HCatSchema outputSchema = null; + + @Override + protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws + IOException, InterruptedException { + if (outputSchema == null) { + outputSchema = 
HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA)); + } + int sum = 0; + for (IntWritable i : values) { + sum += i.get(); + } + HCatRecord output = new DefaultHCatRecord(2); + output.set("word", outputSchema, key); + output.set("count", outputSchema, sum); + context.write(null, output); + } + } + + public static void main(String[] args) throws Exception { + int exitCode = ToolRunner.run(new HCatalogMR(), args); + System.exit(exitCode); + } + } diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh new file mode 100755 index 00000000..8c9ab5e6 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +############################################################################ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +############################################################################ + +############################################################################ +# This script is used to generate the hadoop-*-api.report.json files in the +# test/resources directory. To use it, you will first need to download an +# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the +# directory where you untar that distribution. You will then need to set +# BIGTOP_HOME to the directory where your bigtop source is located.
Then +# run this script for each of the jars you want to generate a report for. +# The arguments passed to this script should be -p <output directory> -j <jar file> +# where outputdir is the directory you'd like to write the report to and +# jarfile is the full path of the jar to generate the report for. Reports +# should be generated for the following jars: hadoop-common, hadoop-hdfs, +# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and +# hadoop-mapreduce-client-core +# +# Example usage: +# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3 +# export BIGTOP_HOME=/home/me/git/bigtop +# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner.sh -j $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources +# +# The resulting reports should be committed to git. This script only needs +# to be run once per ODPi release. +############################################################################ + + +if [ "x${APACHE_HADOOP_DIR}" = "x" ] +then + echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in" + exit 1 +fi + +if [ "x${BIGTOP_HOME}" = "x" ] +then + echo "You must set BIGTOP_HOME to the root directory for your bigtop source" + exit 1 +fi + +for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar` +do + CLASSPATH=$CLASSPATH:$jar +done + +for jar in `find $APACHE_HADOOP_DIR -name \*.jar` +do + CLASSPATH=$CLASSPATH:$jar +done + +java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@ + diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy new file mode 100644 index 00000000..bc2a3b20 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy @@ -0,0 +1,275 @@ +/** + * Licensed to the Apache
Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime + +import groovy.io.FileType +import org.junit.Assert +import org.apache.bigtop.itest.shell.* +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.Parameterized +import org.junit.runners.Parameterized.Parameters + +import java.util.regex.Matcher +import java.util.regex.Pattern + +/** + * Check all expected environment + * Tests are constructed dynamically, using external DSL to define + * - test name + * - test type + * - command to execute the test + * - expected pattern of the output + */ +@RunWith(Parameterized.class) +public class TestSpecsRuntime { + private String testName + private String type + private Map arguments + + private static ENV = System.getenv() + + @Parameters(name="{0}") + public static Collection allTests() { + List specs = []; + + config.specs.tests.each { test -> + specs.add([test.value.name, test.value.type, test.value.arguments] as Object[]) + } + return specs + } + + public TestSpecsRuntime (String testName, String type, Map arguments) { + this.testName = testName + this.type = type + this.arguments = arguments + } + + public static final String testsList = System.properties['test.resources.dir'] ?: + "${System.properties['buildDir']}/resources/test" + def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}")) + + private static String getTestConfigName() { + return "$testsList/testRuntimeSpecConf.groovy"; + } + + private Map getEnvMap(String command) { + def envMap = [:] + Shell sh = new Shell() + def envvars = sh.exec(command).getOut() + if (sh.getRet() == 0) { + envvars.each { + def match = it =~ /(?<variable>[^=]+)='(?<value>[^']+)'$/ 
+ if ( match.matches() ) { + envMap[match.group('variable')] = match.group('value') + } + } + } + return envMap + } + + private String getEnv(String name, String cmd) { + String value = ENV[name] + if (value == null) { + value = getEnvMap(cmd)[name] + } + return value + } + + @Test + public void testAll() { + switch (type) { + case 'shell': + Shell sh = new Shell() + def output = sh.exec(arguments['command']).getOut().join("\n") + int actualResult = sh.getRet() + int expectedResult = arguments['expectedResult'] ? arguments['expectedResult'] : 0 // use 0 as default success code + Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}", + actualResult == expectedResult) + break + + case 'envdir': + def var = arguments['variable'] + def isPathRelative = arguments['relative'] + def pathString = getEnv(var, arguments['envcmd']) + Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null ) + + if ( arguments['pattern'] ) { + Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern", + pathString ==~ /${arguments['pattern']}/) + } + + def pathFile = new File(pathString) + if ( isPathRelative ) { + Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() ) + } else { + if (!arguments['donotcheckexistance']) { + Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() ) + Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() ) + } + } + break + + case 'dirstruct': + def expectedFiles = [] + new File("${testsList}", "${arguments['referenceList']}").eachLine { line -> + expectedFiles << ~line + } + def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd']) + Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue", + baseDirEnv) + def root = new File(baseDirEnv) + def actualFiles = [] + def 
missingFiles = [] + if ( ! root.exists() ) { + Assert.assertFail("${testName} fail: ${baseDirEnv} does not exist!"); + } + + root.eachFileRecurse(FileType.ANY) { file -> + def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path + actualFiles << relPath + } + + expectedFiles.each { wantFile -> + def ok = false + for (def x : actualFiles) { + if (actualFiles =~ wantFile) { + ok = true + break + } + } + if (!ok) { + missingFiles << wantFile + } + } + + Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ", + missingFiles.size() == 0) + break + + case 'dircontent': + def expectedFiles = [] + new File("${testsList}", "${arguments['referenceList']}").eachLine { line -> + expectedFiles << ~line + } + + def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd']) + def subDir = arguments['subDir'] + if (!subDir && arguments['subDirEnv']) { + subDir = getEnv(arguments['subDirEnv'], arguments['envcmd']) + } + + def dir = null + if (subDir) { + dir = new File(baseDir, subDir) + } else { + dir = new File(baseDir) + } + Assert.assertNotNull("Directory has to be set for the test to continue", dir) + + def actualFiles = [] + if (dir.exists()) { + dir.eachFile FileType.FILES, { file -> + def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path + actualFiles << relPath + } + } + + def missingList = [] + for (def wantFile : expectedFiles) { + def ok = false + for (def haveFile : actualFiles) { + if (haveFile =~ wantFile) { + ok = true + break + } + } + if (! ok) { + missingList << wantFile + } + } + + def extraList = [] + for (def haveFile : actualFiles) { + def ok = false + for (def wantFile : expectedFiles) { + if (haveFile =~ wantFile) { + ok = true + break + } + } + if (! 
ok) { + extraList << haveFile + } + } + + def commonFiles = actualFiles.intersect(expectedFiles) + Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}", + missingList.size() == 0 && extraList.size() == 0) + break + case 'hadoop_tools': + def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars") + Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr) + + def toolsPath = new File(toolsPathStr) + Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute()) + + Shell sh = new Shell() + def classPath = sh.exec("hadoop classpath").getOut().join("\n") + Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0) + + Assert.assertFalse("${testName} fail: The enire '${toolsPath}' path should not be included in the hadoop's classpath", + classPath.split(File.pathSeparator).any { + new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/ + } + ) + break + case 'api_examination': + def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd']) + def libdir = getEnv(arguments['libDir'], arguments['envcmd']) + + def dir = new File(basedir + "/" + libdir) + Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory()) + def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar") + def String[] jars = dir.list(new FilenameFilter() { + @Override + boolean accept(File d, String name) { + Matcher matcher = pattern.matcher(name) + return (matcher.matches() && !name.contains("test")) + } + }) + Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length) + def jar = dir.getAbsolutePath() + "/" + jars[0] + + def examinerJar = System.properties['odpi.test.hive.hcat.job.jar'] + def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile'] + Shell sh = new 
Shell() + def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr() + int rc = sh.getRet() + Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc) + if (results.size() > 0) { + System.out.println("Received report for jar " + arguments['jar'] + results.join("\n")) + } + break; + + + default: + break + } + } +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java new file mode 100644 index 00000000..3e56224b --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java @@ -0,0 +1,121 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.exec.CommandLine; +import org.apache.commons.exec.DefaultExecuteResultHandler; +import org.apache.commons.exec.DefaultExecutor; +import org.apache.commons.exec.ExecuteException; +import org.apache.commons.exec.ExecuteWatchdog; +import org.apache.commons.exec.Executor; +import org.apache.commons.exec.PumpStreamHandler; +import org.apache.commons.exec.environment.EnvironmentUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +public class HiveHelper { + + private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName()); + + public static Map execCommand(CommandLine commandline) { + return execCommand(commandline, null); + } + + public static Map execCommand(CommandLine commandline, + Map envVars) { + + System.out.println("Executing command:"); + System.out.println(commandline.toString()); + Map env = null; + Map entry = new HashMap(); + try { + env = EnvironmentUtils.getProcEnvironment(); + } catch (IOException e1) { + // TODO Auto-generated catch block + LOG.debug("Failed to get process environment: "+ e1.getMessage()); + e1.printStackTrace(); + } + if (envVars != null) { + for (String key : envVars.keySet()) { + env.put(key, envVars.get(key)); + } + } + + DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream); + ExecuteWatchdog watchdog = 
new ExecuteWatchdog(60*10000); + Executor executor = new DefaultExecutor(); + executor.setExitValue(1); + executor.setWatchdog(watchdog); + executor.setStreamHandler(streamHandler); + try { + executor.execute(commandline, env, resultHandler); + } catch (ExecuteException e) { + // TODO Auto-generated catch block + LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue())); + LOG.debug("outputStream: "+ outputStream.toString()); + entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); + entry.put("outputStream", outputStream.toString() + e.getMessage()); + e.printStackTrace(); + return entry; + } catch (IOException e) { + // TODO Auto-generated catch block + LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue())); + LOG.debug("outputStream: "+ outputStream.toString()); + entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); + entry.put("outputStream", outputStream.toString() + e.getMessage()); + e.printStackTrace(); + return entry; + } + + try { + resultHandler.waitFor(); + /*System.out.println("Command output: "+outputStream.toString());*/ + entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); + entry.put("outputStream", outputStream.toString()); + return entry; + } catch (InterruptedException e) { + // TODO Auto-generated catch block + /*System.out.println("Command output: "+outputStream.toString());*/ + LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue())); + LOG.debug("outputStream: "+ outputStream.toString()); + entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); + entry.put("outputStream", outputStream.toString()); + e.printStackTrace(); + return entry; + } + } + + protected static String getProperty(String property, String description) { + String val = System.getProperty(property); + if (val == null) { + throw new RuntimeException("You must set the property " + property + " with " + + 
description); + } + LOG.debug(description + " is " + val); + return val; + } + + +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java new file mode 100644 index 00000000..7512dabf --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java @@ -0,0 +1,79 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +public class JdbcConnector { + private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName()); + + protected static final String URL = "odpi.test.hive.jdbc.url"; + protected static final String USER = "odpi.test.hive.jdbc.user"; + protected static final String PASSWD = "odpi.test.hive.jdbc.password"; + protected static final String LOCATION = "odpi.test.hive.location"; + protected static final String METASTORE_URL = "odpi.test.hive.metastore.url"; + protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test"; + protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test"; + protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir"; + protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir"; + + protected static Connection conn; + + @BeforeClass + public static void connectToJdbc() throws SQLException { + // Assume they've put the URL for the JDBC driver in an environment variable. 
+ String jdbcUrl = getProperty(URL, "the JDBC URL"); + String jdbcUser = getProperty(USER, "the JDBC user name"); + String jdbcPasswd = getProperty(PASSWD, "the JDBC password"); + + Properties props = new Properties(); + props.put("user", jdbcUser); + if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd); + conn = DriverManager.getConnection(jdbcUrl, props); + } + + @AfterClass + public static void closeJdbc() throws SQLException { + if (conn != null) conn.close(); + } + + protected static String getProperty(String property, String description) { + String val = System.getProperty(property); + if (val == null) { + throw new RuntimeException("You must set the property " + property + " with " + + description); + } + LOG.debug(description + " is " + val); + return val; + } + + protected static boolean testActive(String property, String description) { + String val = System.getProperty(property, "true"); + LOG.debug(description + " is " + val); + return Boolean.valueOf(val); + } + +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java new file mode 100644 index 00000000..578621aa --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java @@ -0,0 +1,201 @@ +package org.odpi.specs.runtime.hive; +import org.apache.commons.exec.CommandLine; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import java.io.FileNotFoundException; +import java.io.PrintWriter; +import java.util.Map; + +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +public class TestBeeline { + + public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName()); + + private static final String URL = "odpi.test.hive.jdbc.url"; + private static final String USER = "odpi.test.hive.jdbc.user"; + private static final String PASSWD = "odpi.test.hive.jdbc.password"; + + private static Map results; + private static String beelineUrl; + private static String beelineUser; + private static String beelinePasswd; + + //creating beeline base command with username and password as per inputs + private static CommandLine beelineBaseCommand = new CommandLine("beeline"); + + @BeforeClass + public static void initialSetup(){ + TestBeeline.beelineUrl = System.getProperty(URL); + TestBeeline.beelineUser = System.getProperty(USER); + TestBeeline.beelinePasswd =System.getProperty(PASSWD); + + if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") + { + beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd); + } + else if (beelineUser != null && beelineUser != "") + { + beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser); + } + else { + beelineBaseCommand.addArgument("-u").addArgument(beelineUrl); + } + LOG.info("URL is " + beelineUrl); + LOG.info("User is " + beelineUser); + LOG.info("Passwd is " + beelinePasswd); + LOG.info("Passwd is null " + (beelinePasswd == null)); + } + + @Test + public void checkBeeline() { + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand)); + String consoleMsg = 
results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + } + + @Test + public void checkBeelineConnect(){ + try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } + catch (FileNotFoundException e1) { + e1.printStackTrace(); + } + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false)); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") ); + } + + @Test + public void checkBeelineHelp(){ + results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help")); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline --help FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception")); + } + + @Test + public void checkBeelineQueryExecFromCmdLine(){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;")); + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); + }else{ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;")); + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;")); + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); + } + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline -e FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive")); + } + + @Test + public void checkBeelineQueryExecFromFile() throws FileNotFoundException{ + + try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); } + try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } + try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } + try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); } + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false)); + + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false)); + }else{ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false)); + } + + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false)); + + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline -f FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false)); + } + + @Test + public void checkBeelineInitFile() throws FileNotFoundException{ + + try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); } + try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); } + try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); } + try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); } + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false)); + + if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false)); + }else{ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false)); + } + + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false)); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline -i FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false)); + } + + @Test + public void checkBeelineHiveVar() throws FileNotFoundException{ + + try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); } + try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); } + try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); } + try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); } + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false)); + + if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false)); + }else{ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false)); + } + + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false)); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline --hivevar FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false)); + } + + @Test + public void checkBeelineFastConnect(){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false")); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip")); + } + + @Test + public void checkBeelineVerbose(){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true")); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + } + + @Test + public void checkBeelineShowHeader(){ + results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;")); + String consoleMsg = results.get("outputStream").toLowerCase(); + Assert.assertEquals("beeline --showHeader FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); + } + + @AfterClass + public static void cleanup() throws FileNotFoundException { + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false)); + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false)); + } +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java new file mode 100644 index 00000000..2b70909e --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java @@ -0,0 +1,213 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import java.io.FileNotFoundException; +import java.io.PrintWriter; +import java.util.Map; + +import org.apache.commons.exec.CommandLine; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.AfterClass; +import org.junit.Assert; + +public class TestCLI { + + static Map results; + static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true"; + + @BeforeClass + public static void setup(){ + + results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive")); + Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue"))); + } + + @Test + public void help(){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H")); + //LOG.info(results.get("exitValue")); + Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue"))); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help")); + Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue"))); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U")); + Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue"))); + } + + @Test + public void sqlFromCmdLine(){ + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); + 
if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); + } + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + + @Test + public void sqlFromFiles() throws FileNotFoundException{ + try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); } + try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } + try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } + try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); } + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new 
CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); + } + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db)); + } + + @Test + public void silent() { + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:")); + } + + @Test + public void verbose(){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES")); + } + + @Test + public void initialization() throws 
FileNotFoundException{ + try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } + try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive")); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive")); + } + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + + @Test + public void database(){ + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + 
if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue"))); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue"))); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("Failed to create table using --database argument.", 
0, Integer.parseInt(results.get("exitValue"))); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + + @Test + public void hiveConf(){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES")); + } + + @Test + public void variableSubsitution() throws FileNotFoundException{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + try(PrintWriter out = new 
PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); } + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false)); + Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); + + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false)); + Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + + @Test + public void hiveVar() throws FileNotFoundException{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); + if(!results.get("outputStream").contains("odpi_runtime_hive")){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + }else{ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); 
out.println("quit;"); } + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false)); + Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); + + try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); } + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false)); + Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); + Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); + + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + } + + @AfterClass + public static void cleanup(){ + results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false)); + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false)); + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false)); + results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false)); + results = HiveHelper.execCommand(new 
CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false)); + } + +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java new file mode 100644 index 00000000..0ea49ce8 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java @@ -0,0 +1,158 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.commons.exec.CommandLine; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; +import org.apache.hive.hcatalog.data.schema.HCatSchema; +import org.apache.thrift.TException; +import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + + +public class TestHCatalog { + private static final String JOBJAR = "odpi.test.hive.hcat.job.jar"; + private static final String HCATCORE = "odpi.test.hive.hcat.core.jar"; + + private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName()); + + private static IMetaStoreClient client = null; + private static HiveConf conf; + private static HCatSchema 
inputSchema; + private static HCatSchema outputSchema; + + private Random rand; + + @BeforeClass + public static void connect() throws MetaException { + if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) { + String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR, + "Hive conf directory "); + String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR, + "Hadoop conf directory "); + conf = new HiveConf(); + String fileSep = System.getProperty("file.separator"); + conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml")); + conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml")); + conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml")); + conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml")); + conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml")); + client = new HiveMetaStoreClient(conf); + + } + } + + @Before + public void checkIfActive() { + Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")); + rand = new Random(); + } + + @Test + public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException, + InterruptedException, URISyntaxException { + // Create a table to write to + final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE); + SerDeInfo serde = new SerDeInfo("default_serde", + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); + FieldSchema schema = new FieldSchema("line", "string", ""); + inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(), + HCatFieldSchema.Type.STRING, schema.getComment()))); + StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null, + "org.apache.hadoop.mapred.TextInputFormat", + "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null, + new HashMap()); + Table table = new Table(inputTable, 
"default", "me", 0, 0, 0, sd, null, + new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); + client.createTable(table); + + final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE); + sd = new StorageDescriptor(Arrays.asList( + new FieldSchema("word", "string", ""), + new FieldSchema("count", "int", "")), + null, "org.apache.hadoop.mapred.TextInputFormat", + "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null, + new HashMap()); + table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null, + new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); + client.createTable(table); + outputSchema = new HCatSchema(Arrays.asList( + new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""), + new HCatFieldSchema("count", HCatFieldSchema.Type.INT, ""))); + + // LATER Could I use HCatWriter here and the reader to read it? + // Write some stuff into a file in the location of the table + table = client.getTable("default", inputTable); + String inputFile = table.getSd().getLocation() + "/input"; + Path inputPath = new Path(inputFile); + FileSystem fs = FileSystem.get(conf); + FSDataOutputStream out = fs.create(inputPath); + out.writeChars("Mary had a little lamb\n"); + out.writeChars("its fleece was white as snow\n"); + out.writeChars("and everywhere that Mary went\n"); + out.writeChars("the lamb was sure to go\n"); + out.close(); + + Map env = new HashMap<>(); + env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, "")); + Map results = HiveHelper.execCommand(new CommandLine("hive") + .addArgument("--service") + .addArgument("jar") + .addArgument(System.getProperty(JOBJAR)) + .addArgument(HCatalogMR.class.getName()) + .addArgument("-it") + .addArgument(inputTable) + .addArgument("-ot") + .addArgument(outputTable) + .addArgument("-is") + .addArgument(inputSchema.getSchemaAsTypeString()) + .addArgument("-os") + .addArgument(outputSchema.getSchemaAsTypeString()), env); + 
LOG.info(results.toString()); + Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue"))); + + client.dropTable("default", inputTable); + client.dropTable("default", outputTable); + } + +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java new file mode 100644 index 00000000..154fd9cd --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java @@ -0,0 +1,545 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.Test; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.sql.Types; + +public class TestJdbc extends JdbcConnector { + private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName()); + + /** + * Test simple non-statement related class. setSchema is tested elsewhere because there's work + * to do for that one. Similarly with getMetadata. + * @throws SQLException + */ + @Test + public void nonStatementCalls() throws SQLException { + conn.clearWarnings(); + + boolean isAutoCommit = conn.getAutoCommit(); + LOG.debug("Auto commit is " + isAutoCommit); + + String catalog = conn.getCatalog(); + LOG.debug("Catalog is " + catalog); + + String schema = conn.getSchema(); + LOG.debug("Schema is " + schema); + + int txnIsolation = conn.getTransactionIsolation(); + LOG.debug("Transaction Isolation is " + txnIsolation); + + SQLWarning warning = conn.getWarnings(); + while (warning != null) { + LOG.debug("Found a warning: " + warning.getMessage()); + warning = warning.getNextWarning(); + } + + boolean closed = conn.isClosed(); + LOG.debug("Is closed? " + closed); + + boolean readOnly = conn.isReadOnly(); + LOG.debug("Is read only?" + readOnly); + + // Hive doesn't support catalogs, so setting this to whatever should be fine. 
If we have + // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause + // issues, so we may need to make this value configurable or something. + conn.setCatalog("fred"); + } + + /** + * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call + * that on a valid table. Same with getFunctions. + * @throws SQLException + */ + @Test + public void databaseMetaDataCalls() throws SQLException { + DatabaseMetaData md = conn.getMetaData(); + + boolean boolrc = md.allTablesAreSelectable(); + LOG.debug("All tables are selectable? " + boolrc); + + String strrc = md.getCatalogSeparator(); + LOG.debug("Catalog separator " + strrc); + + strrc = md.getCatalogTerm(); + LOG.debug("Catalog term " + strrc); + + ResultSet rs = md.getCatalogs(); + while (rs.next()) { + strrc = rs.getString(1); + LOG.debug("Found catalog " + strrc); + } + + Connection c = md.getConnection(); + + int intrc = md.getDatabaseMajorVersion(); + LOG.debug("DB major version is " + intrc); + + intrc = md.getDatabaseMinorVersion(); + LOG.debug("DB minor version is " + intrc); + + strrc = md.getDatabaseProductName(); + LOG.debug("DB product name is " + strrc); + + strrc = md.getDatabaseProductVersion(); + LOG.debug("DB product version is " + strrc); + + intrc = md.getDefaultTransactionIsolation(); + LOG.debug("Default transaction isolation is " + intrc); + + intrc = md.getDriverMajorVersion(); + LOG.debug("Driver major version is " + intrc); + + intrc = md.getDriverMinorVersion(); + LOG.debug("Driver minor version is " + intrc); + + strrc = md.getDriverName(); + LOG.debug("Driver name is " + strrc); + + strrc = md.getDriverVersion(); + LOG.debug("Driver version is " + strrc); + + strrc = md.getExtraNameCharacters(); + LOG.debug("Extra name characters is " + strrc); + + strrc = md.getIdentifierQuoteString(); + LOG.debug("Identifier quote string is " + strrc); + + // In Hive 1.2 this always returns an empty RS + rs = md.getImportedKeys("a", "b", "d"); + 
+ // In Hive 1.2 this always returns an empty RS + rs = md.getIndexInfo("a", "b", "d", true, true); + + intrc = md.getJDBCMajorVersion(); + LOG.debug("JDBC major version is " + intrc); + + intrc = md.getJDBCMinorVersion(); + LOG.debug("JDBC minor version is " + intrc); + + intrc = md.getMaxColumnNameLength(); + LOG.debug("Maximum column name length is " + intrc); + + strrc = md.getNumericFunctions(); + LOG.debug("Numeric functions are " + strrc); + + // In Hive 1.2 this always returns an empty RS + rs = md.getPrimaryKeys("a", "b", "d"); + + // In Hive 1.2 this always returns an empty RS + rs = md.getProcedureColumns("a", "b", "d", "e"); + + strrc = md.getProcedureTerm(); + LOG.debug("Procedures are called " + strrc); + + // In Hive 1.2 this always returns an empty RS + rs = md.getProcedures("a", "b", "d"); + + strrc = md.getSchemaTerm(); + LOG.debug("Schemas are called " + strrc); + + rs = md.getSchemas(); + while (rs.next()) { + strrc = rs.getString(1); + LOG.debug("Found schema " + strrc); + } + + strrc = md.getSearchStringEscape(); + LOG.debug("Search string escape is " + strrc); + + strrc = md.getStringFunctions(); + LOG.debug("String functions are " + strrc); + + strrc = md.getSystemFunctions(); + LOG.debug("System functions are " + strrc); + + rs = md.getTableTypes(); + while (rs.next()) { + strrc = rs.getString(1); + LOG.debug("Found table type " + strrc); + } + + strrc = md.getTimeDateFunctions(); + LOG.debug("Time/date functions are " + strrc); + + rs = md.getTypeInfo(); + while (rs.next()) { + strrc = rs.getString(1); + LOG.debug("Found type " + strrc); + } + + // In Hive 1.2 this always returns an empty RS + rs = md.getUDTs("a", "b", "d", null); + + boolrc = md.supportsAlterTableWithAddColumn(); + LOG.debug("Supports alter table with add column? " + boolrc); + + boolrc = md.supportsAlterTableWithDropColumn(); + LOG.debug("Supports alter table with drop column? " + boolrc); + + boolrc = md.supportsBatchUpdates(); + LOG.debug("Supports batch updates? 
" + boolrc); + + boolrc = md.supportsCatalogsInDataManipulation(); + LOG.debug("Supports catalogs in data manipulation? " + boolrc); + + boolrc = md.supportsCatalogsInIndexDefinitions(); + LOG.debug("Supports catalogs in index definition? " + boolrc); + + boolrc = md.supportsCatalogsInPrivilegeDefinitions(); + LOG.debug("Supports catalogs in privilege definition? " + boolrc); + + boolrc = md.supportsCatalogsInProcedureCalls(); + LOG.debug("Supports catalogs in procedure calls? " + boolrc); + + boolrc = md.supportsCatalogsInTableDefinitions(); + LOG.debug("Supports catalogs in table definition? " + boolrc); + + boolrc = md.supportsColumnAliasing(); + LOG.debug("Supports column aliasing? " + boolrc); + + boolrc = md.supportsFullOuterJoins(); + LOG.debug("Supports full outer joins? " + boolrc); + + boolrc = md.supportsGroupBy(); + LOG.debug("Supports group by? " + boolrc); + + boolrc = md.supportsLimitedOuterJoins(); + LOG.debug("Supports limited outer joins? " + boolrc); + + boolrc = md.supportsMultipleResultSets(); + LOG.debug("Supports limited outer joins? " + boolrc); + + boolrc = md.supportsNonNullableColumns(); + LOG.debug("Supports non-nullable columns? " + boolrc); + + boolrc = md.supportsOuterJoins(); + LOG.debug("Supports outer joins? " + boolrc); + + boolrc = md.supportsPositionedDelete(); + LOG.debug("Supports positioned delete? " + boolrc); + + boolrc = md.supportsPositionedUpdate(); + LOG.debug("Supports positioned update? " + boolrc); + + boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); + LOG.debug("Supports result set holdability? " + boolrc); + + boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT); + LOG.debug("Supports result set type? " + boolrc); + + boolrc = md.supportsSavepoints(); + LOG.debug("Supports savepoints? " + boolrc); + + boolrc = md.supportsSchemasInDataManipulation(); + LOG.debug("Supports schemas in data manipulation? 
" + boolrc); + + boolrc = md.supportsSchemasInIndexDefinitions(); + LOG.debug("Supports schemas in index definitions? " + boolrc); + + boolrc = md.supportsSchemasInPrivilegeDefinitions(); + LOG.debug("Supports schemas in privilege definitions? " + boolrc); + + boolrc = md.supportsSchemasInProcedureCalls(); + LOG.debug("Supports schemas in procedure calls? " + boolrc); + + boolrc = md.supportsSchemasInTableDefinitions(); + LOG.debug("Supports schemas in table definitions? " + boolrc); + + boolrc = md.supportsSelectForUpdate(); + LOG.debug("Supports select for update? " + boolrc); + + boolrc = md.supportsStoredProcedures(); + LOG.debug("Supports stored procedures? " + boolrc); + + boolrc = md.supportsTransactions(); + LOG.debug("Supports transactions? " + boolrc); + + boolrc = md.supportsUnion(); + LOG.debug("Supports union? " + boolrc); + + boolrc = md.supportsUnionAll(); + LOG.debug("Supports union all? " + boolrc); + + } + + @Test + public void setSchema() throws SQLException { + try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, + ResultSet.CONCUR_READ_ONLY)) { + + final String dbName = "odpi_jdbc_test_db"; + + final String tableName = "odpi_jdbc_test_table"; + stmt.execute("drop table if exists " + tableName); + + stmt.execute("drop database if exists " + dbName + " cascade"); + stmt.execute("create database " + dbName); + + conn.setSchema(dbName); + + DatabaseMetaData md = conn.getMetaData(); + + ResultSet rs = md.getSchemas(null, dbName); + + while (rs.next()) { + String schemaName = rs.getString(2); + LOG.debug("Schema name is " + schemaName); + } + + stmt.execute("create table " + tableName + " (i int, s varchar(32))"); + + rs = md.getTables(null, dbName, tableName, null); + while (rs.next()) { + String tName = rs.getString(3); + LOG.debug("Schema name is " + tName); + } + + rs = md.getColumns(null, dbName, tableName, "i"); + while (rs.next()) { + String colName = rs.getString(4); + LOG.debug("Schema name is " + colName); + } + + 
rs = md.getFunctions(null, dbName, "foo"); + while (rs.next()) { + String funcName = rs.getString(3); + LOG.debug("Schema name is " + funcName); + } + } + } + + @Test + public void statement() throws SQLException { + try (Statement stmt = conn.createStatement()) { + stmt.cancel(); + } + + try (Statement stmt = conn.createStatement()) { + stmt.clearWarnings(); + + final String tableName = "odpi_jdbc_statement_test_table"; + + stmt.execute("drop table if exists " + tableName); + stmt.execute("create table " + tableName + " (a int, b varchar(32))"); + + stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')"); + + int intrc = stmt.getUpdateCount(); + LOG.debug("Update count is " + intrc); + + ResultSet rs = stmt.executeQuery("select * from " + tableName); + while (rs.next()) { + LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2)); + } + + Connection localConn = stmt.getConnection(); + + intrc = stmt.getFetchDirection(); + LOG.debug("Fetch direction is " + intrc); + + intrc = stmt.getFetchSize(); + LOG.debug("Fetch size is " + intrc); + + intrc = stmt.getMaxRows(); + LOG.debug("max rows is " + intrc); + + boolean boolrc = stmt.getMoreResults(); + LOG.debug("more results is " + boolrc); + + intrc = stmt.getQueryTimeout(); + LOG.debug("query timeout is " + intrc); + + stmt.execute("select * from " + tableName); + rs = stmt.getResultSet(); + while (rs.next()) { + LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2)); + } + + intrc = stmt.getResultSetType(); + LOG.debug("result set type is " + intrc); + + SQLWarning warning = stmt.getWarnings(); + while (warning != null) { + LOG.debug("Found a warning: " + warning.getMessage()); + warning = warning.getNextWarning(); + } + + boolrc = stmt.isClosed(); + LOG.debug("is closed " + boolrc); + + boolrc = stmt.isCloseOnCompletion(); + LOG.debug("is close on completion " + boolrc); + + boolrc = stmt.isPoolable(); + LOG.debug("is poolable " + boolrc); + + 
stmt.setFetchDirection(ResultSet.FETCH_FORWARD); + stmt.setFetchSize(500); + stmt.setMaxRows(500); + } + } + + @Test + public void preparedStmtAndResultSet() throws SQLException { + final String tableName = "odpi_jdbc_psars_test_table"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table if exists " + tableName); + stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " + + "i int, lo bigint, sh smallint, st varchar(32))"); + } + + // NOTE Hive 1.2 theoretically support binary, Date & Timestamp in JDBC, but I get errors when I + // try to put them in the query. + try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + + " values (?, ?, ?, ?, ?, ?, ?, ?)")) { + ps.setBoolean(1, true); + ps.setByte(2, (byte)1); + ps.setDouble(3, 3.141592654); + ps.setFloat(4, 3.14f); + ps.setInt(5, 3); + ps.setLong(6, 10L); + ps.setShort(7, (short)20); + ps.setString(8, "abc"); + ps.executeUpdate(); + } + + try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " + + "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { + ps.setNull(1, Types.INTEGER); + ps.setObject(2, "mary had a little lamb"); + ps.executeUpdate(); + ps.setNull(1, Types.INTEGER, null); + ps.setString(2, "its fleece was white as snow"); + ps.clearParameters(); + ps.setNull(1, Types.INTEGER, null); + ps.setString(2, "its fleece was white as snow"); + ps.execute(); + + } + + try (Statement stmt = conn.createStatement()) { + + ResultSet rs = stmt.executeQuery("select * from " + tableName); + + ResultSetMetaData md = rs.getMetaData(); + + int colCnt = md.getColumnCount(); + LOG.debug("Column count is " + colCnt); + + for (int i = 1; i <= colCnt; i++) { + LOG.debug("Looking at column " + i); + String strrc = md.getColumnClassName(i); + LOG.debug("Column class name is " + strrc); + + int intrc = md.getColumnDisplaySize(i); + LOG.debug("Column display size is " + intrc); + + 
strrc = md.getColumnLabel(i); + LOG.debug("Column label is " + strrc); + + strrc = md.getColumnName(i); + LOG.debug("Column name is " + strrc); + + intrc = md.getColumnType(i); + LOG.debug("Column type is " + intrc); + + strrc = md.getColumnTypeName(i); + LOG.debug("Column type name is " + strrc); + + intrc = md.getPrecision(i); + LOG.debug("Precision is " + intrc); + + intrc = md.getScale(i); + LOG.debug("Scale is " + intrc); + + boolean boolrc = md.isAutoIncrement(i); + LOG.debug("Is auto increment? " + boolrc); + + boolrc = md.isCaseSensitive(i); + LOG.debug("Is case sensitive? " + boolrc); + + boolrc = md.isCurrency(i); + LOG.debug("Is currency? " + boolrc); + + intrc = md.getScale(i); + LOG.debug("Scale is " + intrc); + + intrc = md.isNullable(i); + LOG.debug("Is nullable? " + intrc); + + boolrc = md.isReadOnly(i); + LOG.debug("Is read only? " + boolrc); + + } + + while (rs.next()) { + LOG.debug("bo = " + rs.getBoolean(1)); + LOG.debug("bo = " + rs.getBoolean("bo")); + LOG.debug("ti = " + rs.getByte(2)); + LOG.debug("ti = " + rs.getByte("ti")); + LOG.debug("db = " + rs.getDouble(3)); + LOG.debug("db = " + rs.getDouble("db")); + LOG.debug("fl = " + rs.getFloat(4)); + LOG.debug("fl = " + rs.getFloat("fl")); + LOG.debug("i = " + rs.getInt(5)); + LOG.debug("i = " + rs.getInt("i")); + LOG.debug("lo = " + rs.getLong(6)); + LOG.debug("lo = " + rs.getLong("lo")); + LOG.debug("sh = " + rs.getShort(7)); + LOG.debug("sh = " + rs.getShort("sh")); + LOG.debug("st = " + rs.getString(8)); + LOG.debug("st = " + rs.getString("st")); + LOG.debug("tm = " + rs.getObject(8)); + LOG.debug("tm = " + rs.getObject("st")); + LOG.debug("tm was null " + rs.wasNull()); + } + LOG.debug("bo is column " + rs.findColumn("bo")); + + int intrc = rs.getConcurrency(); + LOG.debug("concurrency " + intrc); + + intrc = rs.getFetchDirection(); + LOG.debug("fetch direction " + intrc); + + intrc = rs.getType(); + LOG.debug("type " + intrc); + + Statement copy = rs.getStatement(); + + SQLWarning warning 
= rs.getWarnings(); + while (warning != null) { + LOG.debug("Found a warning: " + warning.getMessage()); + warning = warning.getNextWarning(); + } + rs.clearWarnings(); + } + } +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java new file mode 100644 index 00000000..f2478412 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java @@ -0,0 +1,337 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.Test; + +import java.sql.SQLException; +import java.sql.Statement; + +// This does not test every option that Hive supports, but does try to touch the major +// options, especially anything unique to Hive. See each test for areas tested and not tested. +public class TestSql extends JdbcConnector { + private static final Log LOG = LogFactory.getLog(TestSql.class.getName()); + + @Test + public void db() throws SQLException { + final String db1 = "odpi_sql_db1"; + final String db2 = "odpi_sql_db2"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop database if exists " + db1 + " cascade"); + + // Simple create database + stmt.execute("create database " + db1); + stmt.execute("drop database " + db1); + + stmt.execute("drop schema if exists " + db2 + " cascade"); + + String location = getProperty(LOCATION, "a writable directory in HDFS"); + + // All the bells and whistles + stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location + + "' with dbproperties ('a' = 'b')"); + + stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')"); + + stmt.execute("drop database " + db2 + " restrict"); + } + } + + @Test + public void table() throws SQLException { + final String table1 = "odpi_sql_table1"; + final String table2 = "odpi_sql_table2"; + final String table3 = "odpi_sql_table3"; + final String table4 = "odpi_sql_table4"; + final String table5 = "odpi_sql_table5"; + + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table 
if exists " + table1); + stmt.execute("drop table if exists " + table2); + stmt.execute("drop table if exists " + table3); + stmt.execute("drop table if exists " + table4); + stmt.execute("drop table if exists " + table5); + + String location = getProperty(LOCATION, "a writable directory in HDFS"); + stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" + + location + "'"); + + // With a little bit of everything, except partitions, we'll do those below + stmt.execute("create table if not exists " + table2 + + "(c1 tinyint," + + " c2 smallint," + + " c3 int comment 'a column comment'," + + " c4 bigint," + + " c5 float," + + " c6 double," + + " c7 decimal," + + " c8 decimal(12)," + + " c9 decimal(8,2)," + + " c10 timestamp," + + " c11 date," + + " c12 string," + + " c13 varchar(120)," + + " c14 char(10)," + + " c15 boolean," + + " c16 binary," + + " c17 array," + + " c18 map ," + + " c19 struct," + + " c20 uniontype) " + + "comment 'table comment'" + + "clustered by (c1) sorted by (c2) into 10 buckets " + + "stored as orc " + + "tblproperties ('a' = 'b')"); + + // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler + + stmt.execute("create temporary table " + table3 + " like " + table2); + + stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')"); + + stmt.execute("create table " + table4 + " as select a, b from " + table1); + + stmt.execute("truncate table " + table4); + + stmt.execute("alter table " + table4 + " rename to " + table5); + stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')"); + + // Not testing alter of clustered or sorted by, because that's suicidal + // Not testing alter of skewed or serde properties since we didn't test it for create + // above. 
+ + stmt.execute("drop table " + table1 + " purge"); + stmt.execute("drop table " + table2); + stmt.execute("drop table " + table3); + stmt.execute("drop table " + table5); + } + } + + @Test + public void partitionedTable() throws SQLException { + final String table1 = "odpi_sql_ptable1"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table if exists " + table1); + + stmt.execute("create table " + table1 + + "(c1 int," + + " c2 varchar(32))" + + "partitioned by (p1 string comment 'a partition column')" + + "stored as orc"); + + stmt.execute("alter table " + table1 + " add partition (p1 = 'a')"); + stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')"); + stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')"); + stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')"); + stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate"); + stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')"); + + stmt.execute("alter table " + table1 + " add columns (c3 float)"); + stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')"); + + // Not testing rename partition, exchange partition, msck repair, archive/unarchive, + // set location, enable/disable no_drop/offline, compact (because not everyone may have + // ACID on), change column + + stmt.execute("drop table " + table1); + + } + } + + @Test + public void view() throws SQLException { + final String table1 = "odpi_sql_vtable1"; + final String view1 = "odpi_sql_view1"; + final String view2 = "odpi_sql_view2"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table if exists " + table1); + stmt.execute("drop view if exists " + view1); + stmt.execute("drop view if exists " + view2); + stmt.execute("create table " + table1 + "(a int, b varchar(32))"); + stmt.execute("create view " + view1 + " as select a from " + table1); + + stmt.execute("create view if not exists " 
+ view2 + + " comment 'a view comment' " + + "tblproperties ('a' = 'b') " + + "as select b from " + table1); + + stmt.execute("alter view " + view1 + " as select a, b from " + table1); + stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')"); + + stmt.execute("drop view " + view1); + stmt.execute("drop view " + view2); + } + } + + // Not testing indices because they are currently useless in Hive + // Not testing macros because as far as I know no one uses them + + @Test + public void function() throws SQLException { + final String func1 = "odpi_sql_func1"; + final String func2 = "odpi_sql_func2"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("create temporary function " + func1 + + " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'"); + stmt.execute("drop temporary function " + func1); + + stmt.execute("drop function if exists " + func2); + + stmt.execute("create function " + func2 + + " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'"); + stmt.execute("drop function " + func2); + } + } + + // Not testing grant/revoke/roles as different vendors use different security solutions + // and hence different things will work here. + + // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with + // values and select), and multi-insert. Load is not tested as there's no guarantee that the + // test machine has access to HDFS and thus the ability to upload a file. 
+ @Test + public void insert() throws SQLException { + final String table1 = "odpi_insert_table1"; + final String table2 = "odpi_insert_table2"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table if exists " + table1); + stmt.execute("create table " + table1 + + "(c1 tinyint," + + " c2 smallint," + + " c3 int," + + " c4 bigint," + + " c5 float," + + " c6 double," + + " c7 decimal(8,2)," + + " c8 varchar(120)," + + " c9 char(10)," + + " c10 boolean)" + + " partitioned by (p1 string)"); + + // insert with partition + stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " + + "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," + + "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)"); + + stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict"); + + // dynamic partition + stmt.execute("explain insert into " + table1 + " partition (p1) values " + + "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," + + "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')"); + + stmt.execute("drop table if exists " + table2); + + stmt.execute("create table " + table2 + + "(c1 tinyint," + + " c2 smallint," + + " c3 int," + + " c4 bigint," + + " c5 float," + + " c6 double," + + " c7 decimal(8,2)," + + " c8 varchar(120)," + + " c9 char(10)," + + " c10 boolean)"); + + stmt.execute("explain insert into " + table2 + " values " + + "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," + + "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)"); + + stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " + + "c7, c8, c9, c10 from " + table1); + + // multi-insert + stmt.execute("from " + table1 + + " insert into table " + table1 + " partition (p1 = 'c') " + + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" + + " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10"); + } + } + + // This tests CTEs + @Test + public void cte() throws SQLException { + final String 
table1 = "odpi_cte_table1"; + try (Statement stmt = conn.createStatement()) { + stmt.execute("drop table if exists " + table1); + stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))"); + stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " + + " select c1 from cte1"); + } + } + + // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer), + // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union, + // subqueries, and over. + + @Test + public void select() throws SQLException { + final String[] tables = {"odpi_select_table1", "odpi_select_table2"}; + try (Statement stmt = conn.createStatement()) { + for (int i = 0; i < tables.length; i++) { + stmt.execute("drop table if exists " + tables[i]); + stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))"); + } + + // single table queries tested above in several places + + stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " + + "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " + + "group by a.c2 " + + "order by a.c2 asc " + + "limit 10"); + + stmt.execute("explain select distinct a.c2 " + + "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " + + "order by a.c2 desc "); + + stmt.execute("explain select a.c2, SUM(a.c1) " + + "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " + + "group by a.c2 " + + "having SUM(b.c1) > 0 " + + "order by a.c2 "); + + stmt.execute("explain select a.c2, rank() over (partition by a.c1) " + + "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) "); + + stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]); + + stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2"); + stmt.execute("explain select * from " + tables[0] + " cluster by c1"); + + stmt.execute("explain select * from (select c1 from " + 
tables[0] + ") t"); + stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] + + ")"); + + } + + } + + // Update and delete are not tested because not everyone configures their system to run + // with ACID. + + +} + + + + + diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java new file mode 100644 index 00000000..8e0abda4 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java @@ -0,0 +1,251 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.odpi.specs.runtime.hive; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.thrift.TException; +import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Random; + +public class TestThrift { + + private static final Log LOG = LogFactory.getLog(TestThrift.class.getName()); + + private static IMetaStoreClient client = null; + private static HiveConf conf; + + private Random rand; + + @BeforeClass + public static void connect() throws MetaException { + if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) { + String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL"); + conf = new HiveConf(); + conf.setVar(HiveConf.ConfVars.METASTOREURIS, url); + LOG.info("Set to test against metastore at " + url); + client = new 
HiveMetaStoreClient(conf); + } + } + + @Before + public void checkIfActive() { + Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")); + rand = new Random(); + } + + @Test + public void db() throws TException { + final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE); + + Database db = new Database(dbName, "a db", null, new HashMap()); + client.createDatabase(db); + db = client.getDatabase(dbName); + Assert.assertNotNull(db); + db = new Database(db); + db.getParameters().put("a", "b"); + client.alterDatabase(dbName, db); + List alldbs = client.getDatabases("odpi_*"); + Assert.assertNotNull(alldbs); + Assert.assertTrue(alldbs.size() > 0); + alldbs = client.getAllDatabases(); + Assert.assertNotNull(alldbs); + Assert.assertTrue(alldbs.size() > 0); + client.dropDatabase(dbName, true, true); + } + + // Not testing types calls, as they aren't used AFAIK + + @Test + public void nonPartitionedTable() throws TException { + final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE); + + // I don't test every operation related to tables, but only those that are frequently used. 
+ SerDeInfo serde = new SerDeInfo("default_serde", + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); + FieldSchema fs = new FieldSchema("a", "int", "no comment"); + StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null, + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null, + new HashMap()); + Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null, + new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); + client.createTable(table); + + table = client.getTable("default", tableName); + Assert.assertNotNull(table); + + List tables = + client.getTableObjectsByName("default", Collections.singletonList(tableName)); + Assert.assertNotNull(tables); + Assert.assertEquals(1, tables.size()); + + List tableNames = client.getTables("default", "odpi_*"); + Assert.assertNotNull(tableNames); + Assert.assertTrue(tableNames.size() >= 1); + + tableNames = client.getAllTables("default"); + Assert.assertNotNull(tableNames); + Assert.assertTrue(tableNames.size() >= 1); + + List cols = client.getFields("default", tableName); + Assert.assertNotNull(cols); + Assert.assertEquals(1, cols.size()); + + cols = client.getSchema("default", tableName); + Assert.assertNotNull(cols); + Assert.assertEquals(1, cols.size()); + + table = new Table(table); + table.getParameters().put("a", "b"); + client.alter_table("default", tableName, table, false); + + table.getParameters().put("c", "d"); + client.alter_table("default", tableName, table); + + client.dropTable("default", tableName, true, false); + } + + @Test + public void partitionedTable() throws TException { + final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE); + + // I don't test every operation related to tables, but only those that are frequently used. 
+ SerDeInfo serde = new SerDeInfo("default_serde", + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); + FieldSchema fs = new FieldSchema("a", "int", "no comment"); + StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null, + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null, + new HashMap()); + FieldSchema pk = new FieldSchema("pk", "string", ""); + Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk), + new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); + client.createTable(table); + + sd = new StorageDescriptor(Collections.singletonList(fs), null, + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, + new HashMap()); + Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0, + 0, sd, new HashMap()); + client.add_partition(partition); + + List partitions = new ArrayList<>(2); + sd = new StorageDescriptor(Collections.singletonList(fs), null, + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, + new HashMap()); + partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0, + 0, sd, new HashMap())); + sd = new StorageDescriptor(Collections.singletonList(fs), null, + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), + conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, + new HashMap()); + partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0, + 0, sd, new HashMap())); + client.add_partitions(partitions); + + List parts = client.listPartitions("default", tableName, (short)-1); + Assert.assertNotNull(parts); + Assert.assertEquals(3, parts.size()); + + parts = client.listPartitions("default", tableName, 
Collections.singletonList("x"), + (short)-1); + Assert.assertNotNull(parts); + Assert.assertEquals(1, parts.size()); + + parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me", + Collections.emptyList()); + Assert.assertNotNull(parts); + Assert.assertEquals(3, parts.size()); + + List partNames = client.listPartitionNames("default", tableName, (short)-1); + Assert.assertNotNull(partNames); + Assert.assertEquals(3, partNames.size()); + + parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1); + Assert.assertNotNull(parts); + Assert.assertEquals(1, parts.size()); + + parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x")); + Assert.assertNotNull(parts); + Assert.assertEquals(1, parts.size()); + + partition = client.getPartition("default", tableName, Collections.singletonList("x")); + Assert.assertNotNull(partition); + + partition = client.getPartition("default", tableName, "pk=x"); + Assert.assertNotNull(partition); + + partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"), + "me", Collections.emptyList()); + Assert.assertNotNull(partition); + + partition = new Partition(partition); + partition.getParameters().put("a", "b"); + client.alter_partition("default", tableName, partition); + + for (Partition p : parts) p.getParameters().put("c", "d"); + client.alter_partitions("default", tableName, parts); + + // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence + // from the parser. The odds that anyone other than Hive parser would call this method seem + // low, since you'd have to exactly match the serliazation of the Hive parser. 
+ + // Not testing partition marking events, not used by anyone but Hive replication AFAIK + + client.dropPartition("default", tableName, "pk=x", true); + client.dropPartition("default", tableName, Collections.singletonList("y"), true); + } + + // Not testing index calls, as no one uses indices + + + // Not sure if anyone uses stats calls or not. Other query engines might. Ignoring for now. + + // Not sure if anyone else uses functions, though I'm guessing not as without Hive classes they + // won't be runnable. + + // Not testing authorization calls as AFAIK no one else uses Hive security + + // Not testing transaction/locking calls, as those are used only by Hive. + + // Not testing notification logging calls, as those are used only by Hive replication. + +} diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py b/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py new file mode 100755 index 00000000..091c496e --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py @@ -0,0 +1,80 @@ +#!/usr/bin/python + +''' +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+''' + +import os +import re +import warnings +from optparse import OptionParser + +def main(): + parser = OptionParser() + parser.add_option("-d", "--directory", help="Top level directory of source tree") + parser.add_option("-r", "--report", help="API compatibility report file, in HTML format") + + (options, args) = parser.parse_args() + + # Get the ATS endpoint if it's not given. + if options.directory == None: + print "You must specify a top level directory of the source tree" + return 1 + + if options.report == None: + print "You must specify the report to check against" + return 1 + + publicClasses = set() + for directory in os.walk(options.directory): + for afile in directory[2]: + if re.search("\.java$", afile) != None: + handle = open(os.path.join(directory[0], afile)) + # Figure out the package we're in + pre = re.search("org/apache/hadoop[\w/]*", directory[0]) + if pre == None: + warnings.warn("No package for " + directory[0]) + continue + package = pre.group(0) + expecting = 0 + for line in handle: + if re.search("@InterfaceAudience.Public", line) != None: + expecting = 1 + classname = re.search("class (\w*)", line) + if classname != None and expecting == 1: + publicClasses.add(package + "/" + classname.group(1)) + expecting = 0 + handle.close() + + handle = open(options.report) + haveChecked = set() + for line in handle: + classre = re.search("mangled: (org/apache/hadoop[\w/]+)", line) + if classre != None: + classname = classre.group(1) + if classname not in haveChecked: + if classname in publicClasses: + print "Warning, found change in public class " + classname + haveChecked.add(classname) + handle.close() + + + + +main() + + diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json new file mode 100644 index 00000000..6a6c7af7 --- /dev/null +++ 
b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-common","version":"2.7.3","classes":{"org.apache.hadoop.record.RecordInput":{"name":"org.apache.hadoop.record.RecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws 
java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.NullWritable":{"name":"org.apache.hadoop.io.NullWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.NullWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.NullWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.NullWritable get()":{"name":"get","returnType":"org.apache.hadoop.io.NullWritable","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.XmlRecordInput":{"name":"org.apache.hadoop.record.XmlRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws 
java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileSystem":{"name":"org.apache.hadoop.fs.FileSystem","methods":{"org.apache.hadoop.security.token.Token getDelegationToken(java.lang.String) throws java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws 
java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.ContentSummary getContentSummary(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getContentSummary","returnType":"org.apache.hadoop.fs.ContentSummary","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.Class getFileSystemClass(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystemClass","returnType":"java.lang.Class","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.Map getStatistics()":{"name":"getStatistics","returnType":"java.util.Map","args":[],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FileSystem$Statistics getStatistics(java.lang.String, java.lang.Class)":{"name":"getStatistics","returnType":"org.apache.hadoop.fs.FileSystem$Statistics","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"boolean isFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listFiles(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listFiles","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean areSymlinksEnabled()":{"name":"areSymlinksEnabled","returnType":"boolean","args":[],"exceptions":[]},"boolean 
createNewFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createNewFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"boolean deleteOnExit(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"deleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration) throws 
java.io.IOException":{"name":"getLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void printStatistics() throws java.io.IOException":{"name":"printStatistics","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void moveFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream 
create(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus() throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":[],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"[Lorg.apache.hadoop.security.token.Token; addDelegationTokens(java.lang.String, org.apache.hadoop.security.Credentials) throws java.io.IOException":{"name":"addDelegationTokens","returnType":"[Lorg.apache.hadoop.security.token.Token;","args":["java.lang.String","org.apache.hadoop.security.Credentials"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean 
rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void enableSymlinks()":{"name":"enableSymlinks","returnType":"void","args":[],"exceptions":[]},"void moveToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws 
java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean delete(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isDirectory(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isDirectory","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"boolean 
cancelDeleteOnExit(org.apache.hadoop.fs.Path)":{"name":"cancelDeleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getNamed(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getNamed","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, 
java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, 
long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void moveFromLocalFile([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.net.URI getDefaultUri(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultUri","returnType":"java.net.URI","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, 
org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, 
java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.List getAllStatistics()":{"name":"getAllStatistics","returnType":"java.util.List","args":[],"exceptions":[]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void 
removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void closeAllForUGI(org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException":{"name":"closeAllForUGI","returnType":"void","args":["org.apache.hadoop.security.UserGroupInformation"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.net.URI)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void clearStatistics()":{"name":"clearStatistics","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void 
removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"long getLength(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getCanonicalServiceName()":{"name":"getCanonicalServiceName","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockSize(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getReplication(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, 
java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.LocalFileSystem newInstanceLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstanceLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"boolean exists(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"exists","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void closeAll() throws 
java.io.IOException":{"name":"closeAll","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.BlockLocation":{"name":"org.apache.hadoop.fs.BlockLocation","methods":{"[Ljava.lang.String; getCachedHosts()":{"name":"getCachedHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setTopologyPaths([Ljava.lang.String;) throws java.io.IOException":{"name":"setTopologyPaths","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setHosts([Ljava.lang.String;) throws java.io.IOException":{"name":"setHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setCorrupt(boolean)":{"name":"setCorrupt","returnType":"void","args":["boolean"],"exceptions":[]},"[Ljava.lang.String; getNames() throws 
java.io.IOException":{"name":"getNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getTopologyPaths() throws java.io.IOException":{"name":"getTopologyPaths","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"long getOffset()":{"name":"getOffset","returnType":"long","args":[],"exceptions":[]},"void setOffset(long)":{"name":"setOffset","returnType":"void","args":["long"],"exceptions":[]},"void setNames([Ljava.lang.String;) throws java.io.IOException":{"name":"setNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setLength(long)":{"name":"setLength","returnType":"void","args":["long"],"exceptions":[]},"[Ljava.lang.String; getHosts() throws java.io.IOException":{"name":"getHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"boolean isCorrupt()":{"name":"isCorrupt","returnType":"boolean","args":[],"exceptions":[]},"void setCachedHosts([Ljava.lang.String;)":{"name":"setCachedHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]}}},"org.apache.hadoop.io.Text":{"name":"org.apache.hadoop.io.Text","methods":{"java.lang.String readString(java.io.DataInput, int) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(org.apache.hadoop.io.Text)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Text"],"exceptions":[]},"void validateUTF8([B, int, int) throws 
java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B","int","int"],"exceptions":["java.nio.charset.MalformedInputException"]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void readFields(java.io.DataInput, int) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"int bytesToCodePoint(java.nio.ByteBuffer)":{"name":"bytesToCodePoint","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String decode([B, int, int, boolean) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String, boolean) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String, int) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String","int"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B, int, int) throws 
java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void write(java.io.DataOutput, int) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"void set(java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int utf8Length(java.lang.String)":{"name":"utf8Length","returnType":"int","args":["java.lang.String"],"exceptions":[]},"void readWithKnownLength(java.io.DataInput, int) throws java.io.IOException":{"name":"readWithKnownLength","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.io.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B"],"exceptions":["java.nio.charset.CharacterCodingException"]},"void skip(java.io.DataInput) throws java.io.IOException":{"name":"skip","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int 
charAt(int)":{"name":"charAt","returnType":"int","args":["int"],"exceptions":[]},"int find(java.lang.String, int)":{"name":"find","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"int find(java.lang.String)":{"name":"find","returnType":"int","args":["java.lang.String"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]},"void validateUTF8([B) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B"],"exceptions":["java.nio.charset.MalformedInputException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Writable":{"name":"org.apache.hadoop.io.Writable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VLongWritable":{"name":"org.apache.hadoop.io.VLongWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","returnType":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VLongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VLongWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VersionedWritable":{"name":"org.apache.hadoop.io.VersionedWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte getVersion()":{"name":"getVersion","returnType":"byte","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.SequenceFile":{"name":"org.apache.hadoop.io.SequenceFile","methods":{"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileContext, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, 
org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, [Lorg.apache.hadoop.fs.Options$CreateOpts;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileContext","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata","java.util.EnumSet","[Lorg.apache.hadoop.fs.Options$CreateOpts;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, [Lorg.apache.hadoop.io.SequenceFile$Writer$Option;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","[Lorg.apache.hadoop.io.SequenceFile$Writer$Option;"],"exceptions":["java.io.IOException"]},"void setDefaultCompressionType(org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setDefaultCompressionType","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws 
java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, 
org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","boolean","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer 
createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, 
org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists","methods":{}},"org.apache.hadoop.fs.FileStatus":{"name":"org.apache.hadoop.fs.FileStatus","methods":{"org.apache.hadoop.fs.permission.FsPermission getPermission()":{"name":"getPermission","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"boolean isFile()":{"name":"isFile","returnType":"boolean","args":[],"exceptions":[]},"long getBlockSize()":{"name":"getBlockSize","returnType":"long","args":[],"exceptions":[]},"java.lang.String getOwner()":{"name":"getOwner","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setSymlink(org.apache.hadoop.fs.Path)":{"name":"setSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"long getAccessTime()":{"name":"getAccessTime","returnType":"long","args":[],"exceptions":[]},"boolean isDir()":{"name":"isDir","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isEncrypted()":{"name":"isEncrypted","returnType":"boolean","args":[],"exceptions":[]},"long 
getLen()":{"name":"getLen","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setPath(org.apache.hadoop.fs.Path)":{"name":"setPath","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getSymlink() throws java.io.IOException":{"name":"getSymlink","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"short getReplication()":{"name":"getReplication","returnType":"short","args":[],"exceptions":[]},"boolean isDirectory()":{"name":"isDirectory","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getGroup()":{"name":"getGroup","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean isSymlink()":{"name":"isSymlink","returnType":"boolean","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getModificationTime()":{"name":"getModificationTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.PureJavaCrc32":{"name":"org.apache.hadoop.util.PureJavaCrc32","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.fs.Trash":{"name":"org.apache.hadoop.fs.Trash","methods":{"java.lang.Runnable getEmptier() throws 
java.io.IOException":{"name":"getEmptier","returnType":"java.lang.Runnable","args":[],"exceptions":["java.io.IOException"]},"boolean moveToTrash(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToTrash","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void expunge() throws java.io.IOException":{"name":"expunge","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean moveToAppropriateTrash(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"moveToAppropriateTrash","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void checkpoint() throws java.io.IOException":{"name":"checkpoint","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean isEnabled()":{"name":"isEnabled","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.record.RecordComparator":{"name":"org.apache.hadoop.record.RecordComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.record.RecordComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.record.RecordComparator"],"exceptions":[]}}},"org.apache.hadoop.record.meta.RecordTypeInfo":{"name":"org.apache.hadoop.record.meta.RecordTypeInfo","methods":{"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Collection getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"void 
serialize(org.apache.hadoop.record.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"void addField(java.lang.String, org.apache.hadoop.record.meta.TypeID)":{"name":"addField","returnType":"void","args":["java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":[]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"org.apache.hadoop.record.meta.RecordTypeInfo getNestedStructTypeInfo(java.lang.String)":{"name":"getNestedStructTypeInfo","returnType":"org.apache.hadoop.record.meta.RecordTypeInfo","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.conf.Configuration":{"name":"org.apache.hadoop.conf.Configuration","methods":{"void addResource(org.apache.hadoop.fs.Path)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"java.util.Set getFinalParameters()":{"name":"getFinalParameters","returnType":"java.util.Set","args":[],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String, java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setPattern(java.lang.String, java.util.regex.Pattern)":{"name":"setPattern","returnType":"void","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void 
addResource(org.apache.hadoop.conf.Configuration)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.List getInstances(java.lang.String, java.lang.Class)":{"name":"getInstances","returnType":"java.util.List","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void addResource(java.net.URL)":{"name":"addResource","returnType":"void","args":["java.net.URL"],"exceptions":[]},"void setFloat(java.lang.String, float)":{"name":"setFloat","returnType":"void","args":["java.lang.String","float"],"exceptions":[]},"void set(java.lang.String, java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void setBooleanIfUnset(java.lang.String, boolean)":{"name":"setBooleanIfUnset","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"void reloadConfiguration()":{"name":"reloadConfiguration","returnType":"void","args":[],"exceptions":[]},"java.util.regex.Pattern getPattern(java.lang.String, java.util.regex.Pattern)":{"name":"getPattern","returnType":"java.util.regex.Pattern","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"java.lang.String get(java.lang.String, java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setDeprecatedProperties()":{"name":"setDeprecatedProperties","returnType":"void","args":[],"exceptions":[]},"boolean onlyKeyExists(java.lang.String)":{"name":"onlyKeyExists","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator 
iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.Class getClassByName(java.lang.String) throws java.lang.ClassNotFoundException":{"name":"getClassByName","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":["java.lang.ClassNotFoundException"]},"java.io.InputStream getConfResourceAsInputStream(java.lang.String)":{"name":"getConfResourceAsInputStream","returnType":"java.io.InputStream","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void writeXml(java.io.Writer) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.Writer"],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"java.net.URL getResource(java.lang.String)":{"name":"getResource","returnType":"java.net.URL","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.lang.String, java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"boolean getBoolean(java.lang.String, boolean)":{"name":"getBoolean","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.Enum getEnum(java.lang.String, 
java.lang.Enum)":{"name":"getEnum","returnType":"java.lang.Enum","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void set(java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setEnum(java.lang.String, java.lang.Enum)":{"name":"setEnum","returnType":"void","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"[Ljava.lang.Class; getClasses(java.lang.String, [Ljava.lang.Class;)":{"name":"getClasses","returnType":"[Ljava.lang.Class;","args":["java.lang.String","[Ljava.lang.Class;"],"exceptions":[]},"float getFloat(java.lang.String, float)":{"name":"getFloat","returnType":"float","args":["java.lang.String","float"],"exceptions":[]},"long getLongBytes(java.lang.String, long)":{"name":"getLongBytes","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"java.lang.Class getClassByNameOrNull(java.lang.String)":{"name":"getClassByNameOrNull","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":[]},"void setStrings(java.lang.String, [Ljava.lang.String;)":{"name":"setStrings","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void addDeprecations([Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;)":{"name":"addDeprecations","returnType":"void","args":["[Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;"],"exceptions":[]},"[Ljava.lang.String; getPropertySources(java.lang.String)":{"name":"getPropertySources","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getRange(java.lang.String, 
java.lang.String)":{"name":"getRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setLong(java.lang.String, long)":{"name":"setLong","returnType":"void","args":["java.lang.String","long"],"exceptions":[]},"void setQuietMode(boolean)":{"name":"setQuietMode","returnType":"void","args":["boolean"],"exceptions":[]},"void setClassLoader(java.lang.ClassLoader)":{"name":"setClassLoader","returnType":"void","args":["java.lang.ClassLoader"],"exceptions":[]},"[C getPassword(java.lang.String) throws java.io.IOException":{"name":"getPassword","returnType":"[C","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"setTimeDuration","returnType":"void","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void setDouble(java.lang.String, double)":{"name":"setDouble","returnType":"void","args":["java.lang.String","double"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;","java.lang.String"],"exceptions":[]},"java.lang.String get(java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void setClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"setClass","returnType":"void","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"java.util.Collection getStringCollection(java.lang.String)":{"name":"getStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"java.lang.String 
toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.io.File getFile(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getFile","returnType":"java.io.File","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"double getDouble(java.lang.String, double)":{"name":"getDouble","returnType":"double","args":["java.lang.String","double"],"exceptions":[]},"void setBoolean(java.lang.String, boolean)":{"name":"setBoolean","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isDeprecated(java.lang.String)":{"name":"isDeprecated","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void setInt(java.lang.String, int)":{"name":"setInt","returnType":"void","args":["java.lang.String","int"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"long getLong(java.lang.String, long)":{"name":"getLong","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setAllowNullValueProperties(boolean)":{"name":"setAllowNullValueProperties","returnType":"void","args":["boolean"],"exceptions":[]},"java.util.Collection 
getTrimmedStringCollection(java.lang.String)":{"name":"getTrimmedStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.io.Reader getConfResourceAsReader(java.lang.String)":{"name":"getConfResourceAsReader","returnType":"java.io.Reader","args":["java.lang.String"],"exceptions":[]},"long getTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"getTimeDuration","returnType":"long","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void addResource(java.io.InputStream)":{"name":"addResource","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","int"],"exceptions":[]},"void dumpDeprecatedKeys()":{"name":"dumpDeprecatedKeys","returnType":"void","args":[],"exceptions":[]},"[I getInts(java.lang.String)":{"name":"getInts","returnType":"[I","args":["java.lang.String"],"exceptions":[]},"void addResource(java.lang.String)":{"name":"addResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"void setIfUnset(java.lang.String, 
java.lang.String)":{"name":"setIfUnset","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void unset(java.lang.String)":{"name":"unset","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void dumpConfiguration(org.apache.hadoop.conf.Configuration, java.io.Writer) throws java.io.IOException":{"name":"dumpConfiguration","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.Writer"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getStrings(java.lang.String)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void addResource(java.io.InputStream, java.lang.String)":{"name":"addResource","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"java.util.Map getValByRegex(java.lang.String)":{"name":"getValByRegex","returnType":"java.util.Map","args":["java.lang.String"],"exceptions":[]},"void setSocketAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"setSocketAddr","returnType":"void","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"int getInt(java.lang.String, int)":{"name":"getInt","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void writeXml(java.io.OutputStream) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.OutputStream"],"exceptions":["java.io.IOException"]},"java.lang.ClassLoader getClassLoader()":{"name":"getClassLoader","returnType":"java.lang.ClassLoader","args":[],"exceptions":[]},"void addDefaultResource(java.lang.String)":{"name":"addDefaultResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","int"],"exceptions":[]},"boolean 
hasWarnedDeprecation(java.lang.String)":{"name":"hasWarnedDeprecation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRaw(java.lang.String)":{"name":"getRaw","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableFactories":{"name":"org.apache.hadoop.io.WritableFactories","methods":{"org.apache.hadoop.io.WritableFactory getFactory(java.lang.Class)":{"name":"getFactory","returnType":"org.apache.hadoop.io.WritableFactory","args":["java.lang.Class"],"exceptions":[]},"void setFactory(java.lang.Class, org.apache.hadoop.io.WritableFactory)":{"name":"setFactory","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableFactory"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.io.SetFile":{"name":"org.apache.hadoop.io.SetFile","methods":{}},"org.apache.hadoop.record.compiler.JString":{"name":"org.apache.hadoop.record.compiler.JString","methods":{}},"org.apache.hadoop.record.compiler.JBoolean":{"name":"org.apache.hadoop.record.compiler.JBoolean","methods":{}},"org.apache.hadoop.io.ShortWritable":{"name":"org.apache.hadoop.io.ShortWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.io.ShortWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ShortWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"short get()":{"name":"get","returnType":"short","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(short)":{"name":"set","returnType":"void","args":["short"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.InvalidPathException":{"name":"org.apache.hadoop.fs.InvalidPathException","methods":{}},"org.apache.hadoop.record.compiler.JVector":{"name":"org.apache.hadoop.record.compiler.JVector","methods":{}},"org.apache.hadoop.io.ArrayWritable":{"name":"org.apache.hadoop.io.ArrayWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void set([Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"[Ljava.lang.String; toStrings()":{"name":"toStrings","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.Class getValueClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object 
toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.IntWritable":{"name":"org.apache.hadoop.io.IntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.IntWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.IntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.TwoDArrayWritable":{"name":"org.apache.hadoop.io.TwoDArrayWritable","methods":{"[[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set([[Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.Object 
toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataInputStream":{"name":"org.apache.hadoop.fs.FSDataInputStream","methods":{"void readFully(long, [B) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B"],"exceptions":["java.io.IOException"]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void readFully(long, [B, int, int) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void unbuffer()":{"name":"unbuffer","returnType":"void","args":[],"exceptions":[]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void setReadahead(java.lang.Long) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setReadahead","returnType":"void","args":["java.lang.Long"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void releaseBuffer(java.nio.ByteBuffer)":{"name":"releaseBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.io.InputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.InputStream","args":[],"exceptions":[]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws 
java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int","java.util.EnumSet"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"int read(long, [B, int, int) throws java.io.IOException":{"name":"read","returnType":"int","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void setDropBehind(java.lang.Boolean) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setDropBehind","returnType":"void","args":["java.lang.Boolean"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"int read(java.nio.ByteBuffer) throws java.io.IOException":{"name":"read","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":["java.io.IOException"]},"java.io.FileDescriptor getFileDescriptor() throws java.io.IOException":{"name":"getFileDescriptor","returnType":"java.io.FileDescriptor","args":[],"exceptions":["java.io.IOException"]},"boolean seekToNewSource(long) throws java.io.IOException":{"name":"seekToNewSource","returnType":"boolean","args":["long"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JFloat":{"name":"org.apache.hadoop.record.compiler.JFloat","methods":{}},"org.apache.hadoop.record.compiler.generated.RccConstants":{"name":"org.apache.hadoop.record.compiler.generated.RccConstants","methods":{}},"org.apache.hadoop.io.ArrayPrimitiveWritable":{"name":"org.apache.hadoop.io.ArrayPrimitiveWritable","methods":{"boolean isDeclaredComponentType(java.lang.Class)":{"name":"isDeclaredComponentType","returnType":"boolean","args":["java.lang.Class"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Class 
getComponentType()":{"name":"getComponentType","returnType":"java.lang.Class","args":[],"exceptions":[]},"void set(java.lang.Object)":{"name":"set","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.lang.Class getDeclaredComponentType()":{"name":"getDeclaredComponentType","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object get()":{"name":"get","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileChecksum":{"name":"org.apache.hadoop.fs.FileChecksum","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String getAlgorithmName()":{"name":"getAlgorithmName","returnType":"java.lang.String","args":[],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.Options$ChecksumOpt getChecksumOpt()":{"name":"getChecksumOpt","returnType":"org.apache.hadoop.fs.Options$ChecksumOpt","args":[],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.BinaryRecordOutput":{"name":"org.apache.hadoop.record.BinaryRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws 
java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.BinaryRecordOutput get(java.io.DataOutput)":{"name":"get","returnType":"org.apache.hadoop.record.BinaryRecordOutput","args":["java.io.DataOutput"],"exceptions":[]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, 
java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FsConstants":{"name":"org.apache.hadoop.fs.FsConstants","methods":{}},"org.apache.hadoop.conf.ReconfigurationTaskStatus":{"name":"org.apache.hadoop.conf.ReconfigurationTaskStatus","methods":{"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"boolean stopped()":{"name":"stopped","returnType":"boolean","args":[],"exceptions":[]},"boolean hasTask()":{"name":"hasTask","returnType":"boolean","args":[],"exceptions":[]},"java.util.Map getStatus()":{"name":"getStatus","returnType":"java.util.Map","args":[],"exceptions":[]},"long getEndTime()":{"name":"getEndTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.record.compiler.Consts":{"name":"org.apache.hadoop.record.compiler.Consts","methods":{}},"org.apache.hadoop.fs.ftp.FTPFileSystem":{"name":"org.apache.hadoop.fs.ftp.FTPFileSystem","methods":{"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.generated.Rcc":{"name":"org.apache.hadoop.record.compiler.generated.Rcc","methods":{"void ReInit(java.io.InputStream)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"org.apache.hadoop.record.compiler.JFile Input() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Input","returnType":"org.apache.hadoop.record.compiler.JFile","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"java.util.ArrayList Module() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Module","returnType":"java.util.ArrayList","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JVector Vector() throws 
org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Vector","returnType":"org.apache.hadoop.record.compiler.JVector","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JFile Include() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Include","returnType":"org.apache.hadoop.record.compiler.JFile","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"java.lang.String ModuleName() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"ModuleName","returnType":"java.lang.String","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void main([Ljava.lang.String;)":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getToken(int)":{"name":"getToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":["int"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getNextToken()":{"name":"getNextToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.ParseException generateParseException()":{"name":"generateParseException","returnType":"org.apache.hadoop.record.compiler.generated.ParseException","args":[],"exceptions":[]},"java.util.ArrayList RecordList() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"RecordList","returnType":"java.util.ArrayList","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void disable_tracing()":{"name":"disable_tracing","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.JType Type() throws 
org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Type","returnType":"org.apache.hadoop.record.compiler.JType","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JField Field() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Field","returnType":"org.apache.hadoop.record.compiler.JField","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JMap Map() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Map","returnType":"org.apache.hadoop.record.compiler.JMap","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"int driver([Ljava.lang.String;)":{"name":"driver","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":[]},"void ReInit(org.apache.hadoop.record.compiler.generated.RccTokenManager)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.RccTokenManager"],"exceptions":[]},"void enable_tracing()":{"name":"enable_tracing","returnType":"void","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"void ReInit(java.io.Reader)":{"name":"ReInit","returnType":"void","args":["java.io.Reader"],"exceptions":[]},"org.apache.hadoop.record.compiler.JRecord Record() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Record","returnType":"org.apache.hadoop.record.compiler.JRecord","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void usage()":{"name":"usage","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.fs.FsStatus":{"name":"org.apache.hadoop.fs.FsStatus","methods":{"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getCapacity()":{"name":"getCapacity","returnType":"long","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getRemaining()":{"name":"getRemaining","returnType":"long","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.BooleanWritable":{"name":"org.apache.hadoop.io.BooleanWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(boolean)":{"name":"set","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"boolean get()":{"name":"get","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.BooleanWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.BooleanWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ChecksumFileSystem":{"name":"org.apache.hadoop.fs.ChecksumFileSystem","methods":{"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws 
java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"long getChecksumLength(long, int)":{"name":"getChecksumLength","returnType":"long","args":["long","int"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean reportChecksumFailure(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FSDataInputStream, long, org.apache.hadoop.fs.FSDataInputStream, long)":{"name":"reportChecksumFailure","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FSDataInputStream","long","org.apache.hadoop.fs.FSDataInputStream","long"],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean 
rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"int getBytesPerSum()":{"name":"getBytesPerSum","returnType":"int","args":[],"exceptions":[]},"long getChecksumFileLength(org.apache.hadoop.fs.Path, long)":{"name":"getChecksumFileLength","returnType":"long","args":["org.apache.hadoop.fs.Path","long"],"exceptions":[]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getRawFileSystem()":{"name":"getRawFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"double getApproxChkSumLength(long)":{"name":"getApproxChkSumLength","returnType":"double","args":["long"],"exceptions":[]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isChecksumFile(org.apache.hadoop.fs.Path)":{"name":"isChecksumFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path 
getChecksumFile(org.apache.hadoop.fs.Path)":{"name":"getChecksumFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws 
java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.bloom.BloomFilter":{"name":"org.apache.hadoop.util.bloom.BloomFilter","methods":{"int getVectorSize()":{"name":"getVectorSize","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]}}},"org.apache.hadoop.fs.ftp.FTPException":{"name":"org.apache.hadoop.fs.ftp.FTPException","methods":{}},"org.apache.hadoop.record.XmlRecordOutput":{"name":"org.apache.hadoop.record.XmlRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws 
java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void 
startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.Buffer":{"name":"org.apache.hadoop.record.Buffer","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"[B get()":{"name":"get","returnType":"[B","args":[],"exceptions":[]},"int getCapacity()":{"name":"getCapacity","returnType":"int","args":[],"exceptions":[]},"void truncate()":{"name":"truncate","returnType":"void","args":[],"exceptions":[]},"void append([B)":{"name":"append","returnType":"void","args":["[B"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"void setCapacity(int)":{"name":"setCapacity","returnType":"void","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String toString(java.lang.String) throws java.io.UnsupportedEncodingException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.UnsupportedEncodingException"]},"java.lang.Object clone() throws 
java.lang.CloneNotSupportedException":{"name":"clone","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.CloneNotSupportedException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"int getCount()":{"name":"getCount","returnType":"int","args":[],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"void copy([B, int, int)":{"name":"copy","returnType":"void","args":["[B","int","int"],"exceptions":[]}}},"org.apache.hadoop.io.ElasticByteBufferPool":{"name":"org.apache.hadoop.io.ElasticByteBufferPool","methods":{"void putBuffer(java.nio.ByteBuffer)":{"name":"putBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.nio.ByteBuffer getBuffer(boolean, int)":{"name":"getBuffer","returnType":"java.nio.ByteBuffer","args":["boolean","int"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JInt":{"name":"org.apache.hadoop.record.compiler.JInt","methods":{}},"org.apache.hadoop.io.WritableComparable":{"name":"org.apache.hadoop.io.WritableComparable","methods":{}},"org.apache.hadoop.service.ServiceStateChangeListener":{"name":"org.apache.hadoop.service.ServiceStateChangeListener","methods":{"void stateChanged(org.apache.hadoop.service.Service)":{"name":"stateChanged","returnType":"void","args":["org.apache.hadoop.service.Service"],"exceptions":[]}}},"org.apache.hadoop.metrics2.util.MBeans":{"name":"org.apache.hadoop.metrics2.util.MBeans","methods":{"void unregister(javax.management.ObjectName)":{"name":"unregister","returnType":"void","args":["javax.management.ObjectName"],"exceptions":[]},"javax.management.ObjectName register(java.lang.String, java.lang.String, 
java.lang.Object)":{"name":"register","returnType":"javax.management.ObjectName","args":["java.lang.String","java.lang.String","java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JFile":{"name":"org.apache.hadoop.record.compiler.JFile","methods":{"int genCode(java.lang.String, java.lang.String, java.util.ArrayList) throws java.io.IOException":{"name":"genCode","returnType":"int","args":["java.lang.String","java.lang.String","java.util.ArrayList"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableComparator":{"name":"org.apache.hadoop.io.WritableComparator","methods":{"long readLong([B, int)":{"name":"readLong","returnType":"long","args":["[B","int"],"exceptions":[]},"int readUnsignedShort([B, int)":{"name":"readUnsignedShort","returnType":"int","args":["[B","int"],"exceptions":[]},"int compare(java.lang.Object, java.lang.Object)":{"name":"compare","returnType":"int","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"int hashBytes([B, int)":{"name":"hashBytes","returnType":"int","args":["[B","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.io.WritableComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableComparator"],"exceptions":[]},"int compare(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.WritableComparable)":{"name":"compare","returnType":"int","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"int compareBytes([B, int, int, [B, int, int)":{"name":"compareBytes","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.io.WritableComparator get(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"get","returnType":"org.apache.hadoop.io.WritableComparator","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"float readFloat([B, int)":{"name":"readFloat","returnType":"float","args":["[B","int"],"exceptions":[]},"int hashBytes([B, int, int)":{"name":"hashBytes","returnType":"int","args":["[B","int","int"],"exceptions":[]},"long readVLong([B, int) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["[B","int"],"exceptions":["java.io.IOException"]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"int readInt([B, int)":{"name":"readInt","returnType":"int","args":["[B","int"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparator get(java.lang.Class)":{"name":"get","returnType":"org.apache.hadoop.io.WritableComparator","args":["java.lang.Class"],"exceptions":[]},"int readVInt([B, int) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["[B","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable newKey()":{"name":"newKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"double readDouble([B, int)":{"name":"readDouble","returnType":"double","args":["[B","int"],"exceptions":[]}}},"org.apache.hadoop.io.Stringifier":{"name":"org.apache.hadoop.io.Stringifier","methods":{"java.lang.Object fromString(java.lang.String) throws java.io.IOException":{"name":"fromString","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String toString(java.lang.Object) throws 
java.io.IOException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.permission.FsAction":{"name":"org.apache.hadoop.fs.permission.FsAction","methods":{"org.apache.hadoop.fs.permission.FsAction not()":{"name":"not","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getFsAction(java.lang.String)":{"name":"getFsAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction and(org.apache.hadoop.fs.permission.FsAction)":{"name":"and","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]},"[Lorg.apache.hadoop.fs.permission.FsAction; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.permission.FsAction;","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["java.lang.String"],"exceptions":[]},"boolean implies(org.apache.hadoop.fs.permission.FsAction)":{"name":"implies","returnType":"boolean","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction or(org.apache.hadoop.fs.permission.FsAction)":{"name":"or","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]}}},"org.apache.hadoop.io.ObjectWritable":{"name":"org.apache.hadoop.io.ObjectWritable","methods":{"void writeObject(java.io.DataOutput, java.lang.Object, java.lang.Class, org.apache.hadoop.conf.Configuration, boolean) throws 
java.io.IOException":{"name":"writeObject","returnType":"void","args":["java.io.DataOutput","java.lang.Object","java.lang.Class","org.apache.hadoop.conf.Configuration","boolean"],"exceptions":["java.io.IOException"]},"java.lang.Object readObject(java.io.DataInput, org.apache.hadoop.io.ObjectWritable, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"readObject","returnType":"java.lang.Object","args":["java.io.DataInput","org.apache.hadoop.io.ObjectWritable","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void set(java.lang.Object)":{"name":"set","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.lang.Class loadClass(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"loadClass","returnType":"java.lang.Class","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"java.lang.Object readObject(java.io.DataInput, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"readObject","returnType":"java.lang.Object","args":["java.io.DataInput","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void writeObject(java.io.DataOutput, java.lang.Object, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"writeObject","returnType":"void","args":["java.io.DataOutput","java.lang.Object","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Class 
getDeclaredClass()":{"name":"getDeclaredClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Object get()":{"name":"get","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataOutputStream":{"name":"org.apache.hadoop.fs.FSDataOutputStream","methods":{"void hflush() throws java.io.IOException":{"name":"hflush","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void sync() throws java.io.IOException":{"name":"sync","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void hsync() throws java.io.IOException":{"name":"hsync","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setDropBehind(java.lang.Boolean) throws java.io.IOException":{"name":"setDropBehind","returnType":"void","args":["java.lang.Boolean"],"exceptions":["java.io.IOException"]},"java.io.OutputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.OutputStream","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.XAttrCodec":{"name":"org.apache.hadoop.fs.XAttrCodec","methods":{"[Lorg.apache.hadoop.fs.XAttrCodec; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.XAttrCodec;","args":[],"exceptions":[]},"java.lang.String encodeValue([B, org.apache.hadoop.fs.XAttrCodec) throws 
java.io.IOException":{"name":"encodeValue","returnType":"java.lang.String","args":["[B","org.apache.hadoop.fs.XAttrCodec"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.XAttrCodec valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.XAttrCodec","args":["java.lang.String"],"exceptions":[]},"[B decodeValue(java.lang.String) throws java.io.IOException":{"name":"decodeValue","returnType":"[B","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JLong":{"name":"org.apache.hadoop.record.compiler.JLong","methods":{}},"org.apache.hadoop.fs.FilterFileSystem":{"name":"org.apache.hadoop.fs.FilterFileSystem","methods":{"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws 
java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, 
java.lang.String) throws java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, 
boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws 
java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws 
java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path 
getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getRawFileSystem()":{"name":"getRawFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws 
java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSError":{"name":"org.apache.hadoop.fs.FSError","methods":{}},"org.apache.hadoop.record.compiler.JRecord":{"name":"org.apache.hadoop.record.compiler.JRecord","methods":{}},"org.apache.hadoop.util.PureJavaCrc32C":{"name":"org.apache.hadoop.util.PureJavaCrc32C","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.io.BloomMapFile":{"name":"org.apache.hadoop.io.BloomMapFile","methods":{"void delete(org.apache.hadoop.fs.FileSystem, java.lang.String) throws java.io.IOException":{"name":"delete","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.StructTypeID":{"name":"org.apache.hadoop.record.meta.StructTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.util.Collection 
getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.meta.VectorTypeID":{"name":"org.apache.hadoop.record.meta.VectorTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getElementTypeID()":{"name":"getElementTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.fs.XAttrSetFlag":{"name":"org.apache.hadoop.fs.XAttrSetFlag","methods":{"[Lorg.apache.hadoop.fs.XAttrSetFlag; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.XAttrSetFlag;","args":[],"exceptions":[]},"org.apache.hadoop.fs.XAttrSetFlag valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.XAttrSetFlag","args":["java.lang.String"],"exceptions":[]},"void validate(java.lang.String, boolean, java.util.EnumSet) throws java.io.IOException":{"name":"validate","returnType":"void","args":["java.lang.String","boolean","java.util.EnumSet"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.StringInterner":{"name":"org.apache.hadoop.util.StringInterner","methods":{"java.lang.String strongIntern(java.lang.String)":{"name":"strongIntern","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"java.lang.String 
weakIntern(java.lang.String)":{"name":"weakIntern","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.io.file.tfile.MetaBlockDoesNotExist":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockDoesNotExist","methods":{}},"org.apache.hadoop.fs.FileAlreadyExistsException":{"name":"org.apache.hadoop.fs.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.io.AbstractMapWritable":{"name":"org.apache.hadoop.io.AbstractMapWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.RawLocalFileSystem":{"name":"org.apache.hadoop.fs.RawLocalFileSystem","methods":{"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.io.File pathToFile(org.apache.hadoop.fs.Path)":{"name":"pathToFile","returnType":"java.io.File","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void moveFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long, 
org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void useStatIfAvailable()":{"name":"useStatIfAvailable","returnType":"void","args":[],"exceptions":[]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, 
org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.RccTokenManager":{"name":"org.apache.hadoop.record.compiler.generated.RccTokenManager","methods":{"void SwitchTo(int)":{"name":"SwitchTo","returnType":"void","args":["int"],"exceptions":[]},"void 
ReInit(org.apache.hadoop.record.compiler.generated.SimpleCharStream)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.SimpleCharStream"],"exceptions":[]},"void setDebugStream(java.io.PrintStream)":{"name":"setDebugStream","returnType":"void","args":["java.io.PrintStream"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getNextToken()":{"name":"getNextToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":[],"exceptions":[]},"void ReInit(org.apache.hadoop.record.compiler.generated.SimpleCharStream, int)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.SimpleCharStream","int"],"exceptions":[]}}},"org.apache.hadoop.record.Utils":{"name":"org.apache.hadoop.record.Utils","methods":{"int readVInt(java.io.DataInput) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeVInt(java.io.DataOutput, int) throws java.io.IOException":{"name":"writeVInt","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"long readVLong(java.io.DataInput) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int compareBytes([B, int, int, [B, int, int)":{"name":"compareBytes","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"float readFloat([B, int)":{"name":"readFloat","returnType":"float","args":["[B","int"],"exceptions":[]},"long readVLong([B, int) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["[B","int"],"exceptions":["java.io.IOException"]},"int readVInt([B, int) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["[B","int"],"exceptions":["java.io.IOException"]},"int getVIntSize(long)":{"name":"getVIntSize","returnType":"int","args":["long"],"exceptions":[]},"void 
writeVLong(java.io.DataOutput, long) throws java.io.IOException":{"name":"writeVLong","returnType":"void","args":["java.io.DataOutput","long"],"exceptions":["java.io.IOException"]},"double readDouble([B, int)":{"name":"readDouble","returnType":"double","args":["[B","int"],"exceptions":[]}}},"org.apache.hadoop.record.meta.FieldTypeInfo":{"name":"org.apache.hadoop.record.meta.FieldTypeInfo","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String getFieldID()":{"name":"getFieldID","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean equals(org.apache.hadoop.record.meta.FieldTypeInfo)":{"name":"equals","returnType":"boolean","args":["org.apache.hadoop.record.meta.FieldTypeInfo"],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getTypeID()":{"name":"getTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]}}},"org.apache.hadoop.io.MultipleIOException":{"name":"org.apache.hadoop.io.MultipleIOException","methods":{"java.util.List getExceptions()":{"name":"getExceptions","returnType":"java.util.List","args":[],"exceptions":[]},"java.io.IOException createIOException(java.util.List)":{"name":"createIOException","returnType":"java.io.IOException","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.fs.UnsupportedFileSystemException":{"name":"org.apache.hadoop.fs.UnsupportedFileSystemException","methods":{}},"org.apache.hadoop.record.BinaryRecordInput":{"name":"org.apache.hadoop.record.BinaryRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws 
java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.BinaryRecordInput get(java.io.DataInput)":{"name":"get","returnType":"org.apache.hadoop.record.BinaryRecordInput","args":["java.io.DataInput"],"exceptions":[]},"boolean readBool(java.lang.String) throws 
java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.TokenMgrError":{"name":"org.apache.hadoop.record.compiler.generated.TokenMgrError","methods":{"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.io.FloatWritable":{"name":"org.apache.hadoop.io.FloatWritable","methods":{"float get()":{"name":"get","returnType":"float","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(float)":{"name":"set","returnType":"void","args":["float"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.FloatWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.FloatWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ChecksumException":{"name":"org.apache.hadoop.fs.ChecksumException","methods":{"long getPos()":{"name":"getPos","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.Progressable":{"name":"org.apache.hadoop.util.Progressable","methods":{"void progress()":{"name":"progress","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.util.bloom.DynamicBloomFilter":{"name":"org.apache.hadoop.util.bloom.DynamicBloomFilter","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.HashFunction":{"name":"org.apache.hadoop.util.bloom.HashFunction","methods":{"[I 
hash(org.apache.hadoop.util.bloom.Key)":{"name":"hash","returnType":"[I","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.tracing.SpanReceiverInfoBuilder":{"name":"org.apache.hadoop.tracing.SpanReceiverInfoBuilder","methods":{"org.apache.hadoop.tracing.SpanReceiverInfo build()":{"name":"build","returnType":"org.apache.hadoop.tracing.SpanReceiverInfo","args":[],"exceptions":[]},"void addConfigurationPair(java.lang.String, java.lang.String)":{"name":"addConfigurationPair","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.io.LongWritable":{"name":"org.apache.hadoop.io.LongWritable","methods":{"int compareTo(org.apache.hadoop.io.LongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.LongWritable"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","returnType":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.permission.FsPermission":{"name":"org.apache.hadoop.fs.permission.FsPermission","methods":{"org.apache.hadoop.fs.permission.FsAction 
getUserAction()":{"name":"getUserAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["java.lang.String"],"exceptions":[]},"void fromShort(short)":{"name":"fromShort","returnType":"void","args":["short"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission createImmutable(short)":{"name":"createImmutable","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["short"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"short toShort()":{"name":"toShort","returnType":"short","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getGroupAction()":{"name":"getGroupAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"boolean getEncryptedBit()":{"name":"getEncryptedBit","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getOtherAction()":{"name":"getOtherAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"void setUMask(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.permission.FsPermission)":{"name":"setUMask","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission 
getFileDefault()":{"name":"getFileDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getDirDefault()":{"name":"getDirDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getCachePoolDefault()":{"name":"getCachePoolDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission applyUMask(org.apache.hadoop.fs.permission.FsPermission)":{"name":"applyUMask","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["org.apache.hadoop.fs.permission.FsPermission"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"short toExtendedShort()":{"name":"toExtendedShort","returnType":"short","args":[],"exceptions":[]},"boolean getStickyBit()":{"name":"getStickyBit","returnType":"boolean","args":[],"exceptions":[]},"boolean getAclBit()":{"name":"getAclBit","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getDefault()":{"name":"getDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getUMask(org.apache.hadoop.conf.Configuration)":{"name":"getUMask","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.DefaultStringifier":{"name":"org.apache.hadoop.io.DefaultStringifier","methods":{"java.lang.Object load(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.Class) throws java.io.IOException":{"name":"load","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.Class"],"exceptions":["java.io.IOException"]},"void store(org.apache.hadoop.conf.Configuration, java.lang.Object, java.lang.String) throws java.io.IOException":{"name":"store","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.Object","java.lang.String"],"exceptions":["java.io.IOException"]},"void storeArray(org.apache.hadoop.conf.Configuration, [Ljava.lang.Object;, java.lang.String) throws java.io.IOException":{"name":"storeArray","returnType":"void","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.Object;","java.lang.String"],"exceptions":["java.io.IOException"]},"[Ljava.lang.Object; loadArray(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.Class) throws java.io.IOException":{"name":"loadArray","returnType":"[Ljava.lang.Object;","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.Class"],"exceptions":["java.io.IOException"]},"java.lang.Object fromString(java.lang.String) throws java.io.IOException":{"name":"fromString","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String toString(java.lang.Object) throws java.io.IOException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.conf.Configured":{"name":"org.apache.hadoop.conf.Configured","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.fs.Path":{"name":"org.apache.hadoop.fs.Path","methods":{"boolean isAbsolute()":{"name":"isAbsolute","returnType":"boolean","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getParent()":{"name":"getParent","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path makeQualified(java.net.URI, org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["java.net.URI","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getPathWithoutSchemeAndAuthority(org.apache.hadoop.fs.Path)":{"name":"getPathWithoutSchemeAndAuthority","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isRoot()":{"name":"isRoot","returnType":"boolean","args":[],"exceptions":[]},"boolean isWindowsAbsolutePath(java.lang.String, boolean)":{"name":"isWindowsAbsolutePath","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isUriPathAbsolute()":{"name":"isUriPathAbsolute","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path 
suffix(java.lang.String)":{"name":"suffix","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.net.URI toUri()":{"name":"toUri","returnType":"java.net.URI","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path mergePaths(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path)":{"name":"mergePaths","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.FileSystem)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.FileSystem"],"exceptions":[]},"boolean isAbsoluteAndSchemeAuthorityNull()":{"name":"isAbsoluteAndSchemeAuthorityNull","returnType":"boolean","args":[],"exceptions":[]},"int depth()":{"name":"depth","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.io.GenericWritable":{"name":"org.apache.hadoop.io.GenericWritable","methods":{"void set(org.apache.hadoop.io.Writable)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration 
getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable get()":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.conf.Configurable":{"name":"org.apache.hadoop.conf.Configurable","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.io.MapFile":{"name":"org.apache.hadoop.io.MapFile","methods":{"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"long fix(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.conf.Configuration) throws java.lang.Exception":{"name":"fix","returnType":"long","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.conf.Configuration"],"exceptions":["java.lang.Exception"]},"void delete(org.apache.hadoop.fs.FileSystem, java.lang.String) throws java.io.IOException":{"name":"delete","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String"],"exceptions":["java.io.IOException"]},"void rename(org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"rename","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.Utils":{"name":"org.apache.hadoop.record.meta.Utils","methods":{"void skip(org.apache.hadoop.record.RecordInput, java.lang.String, org.apache.hadoop.record.meta.TypeID) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ReadOption":{"name":"org.apache.hadoop.fs.ReadOption","methods":{"[Lorg.apache.hadoop.fs.ReadOption; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.ReadOption;","args":[],"exceptions":[]},"org.apache.hadoop.fs.ReadOption valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.ReadOption","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.fs.AvroFSInput":{"name":"org.apache.hadoop.fs.AvroFSInput","methods":{"long tell() throws java.io.IOException":{"name":"tell","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"int read([B, int, int) throws java.io.IOException":{"name":"read","returnType":"int","args":["[B","int","int"],"exceptions":["java.io.IOException"]},"long length()":{"name":"length","returnType":"long","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.TypeID":{"name":"org.apache.hadoop.record.meta.TypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"byte getTypeVal()":{"name":"getTypeVal","returnType":"byte","args":[],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.CountingBloomFilter":{"name":"org.apache.hadoop.util.bloom.CountingBloomFilter","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void delete(org.apache.hadoop.util.bloom.Key)":{"name":"delete","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"int approximateCount(org.apache.hadoop.util.bloom.Key)":{"name":"approximateCount","returnType":"int","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.RetouchedBloomFilter":{"name":"org.apache.hadoop.util.bloom.RetouchedBloomFilter","methods":{"void selectiveClearing(org.apache.hadoop.util.bloom.Key, 
short)":{"name":"selectiveClearing","returnType":"void","args":["org.apache.hadoop.util.bloom.Key","short"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void addFalsePositive([Lorg.apache.hadoop.util.bloom.Key;)":{"name":"addFalsePositive","returnType":"void","args":["[Lorg.apache.hadoop.util.bloom.Key;"],"exceptions":[]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void addFalsePositive(java.util.Collection)":{"name":"addFalsePositive","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"void addFalsePositive(org.apache.hadoop.util.bloom.Key)":{"name":"addFalsePositive","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void addFalsePositive(java.util.List)":{"name":"addFalsePositive","returnType":"void","args":["java.util.List"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.ParseException":{"name":"org.apache.hadoop.record.compiler.generated.ParseException","methods":{"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.record.compiler.generated.Token":{"name":"org.apache.hadoop.record.compiler.generated.Token","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token 
newToken(int)":{"name":"newToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":["int"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JDouble":{"name":"org.apache.hadoop.record.compiler.JDouble","methods":{}},"org.apache.hadoop.io.SortedMapWritable":{"name":"org.apache.hadoop.io.SortedMapWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.Object remove(java.lang.Object)":{"name":"remove","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.util.SortedMap headMap(java.lang.Object)":{"name":"headMap","returnType":"java.util.SortedMap","args":["java.lang.Object"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable firstKey()":{"name":"firstKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.util.SortedMap tailMap(org.apache.hadoop.io.WritableComparable)":{"name":"tailMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"java.util.SortedMap subMap(java.lang.Object, java.lang.Object)":{"name":"subMap","returnType":"java.util.SortedMap","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"java.util.SortedMap subMap(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.WritableComparable)":{"name":"subMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"org.apache.hadoop.io.Writable remove(java.lang.Object)":{"name":"remove","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Collection values()":{"name":"values","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.util.Comparator comparator()":{"name":"comparator","returnType":"java.util.Comparator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable put(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable)":{"name":"put","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"boolean isEmpty()":{"name":"isEmpty","returnType":"boolean","args":[],"exceptions":[]},"java.lang.Object lastKey()":{"name":"lastKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.util.Set entrySet()":{"name":"entrySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(java.lang.Object)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Set keySet()":{"name":"keySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable lastKey()":{"name":"lastKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.util.SortedMap tailMap(java.lang.Object)":{"name":"tailMap","returnType":"java.util.SortedMap","args":["java.lang.Object"],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"boolean containsKey(java.lang.Object)":{"name":"containsKey","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object get(java.lang.Object)":{"name":"get","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.util.SortedMap 
headMap(org.apache.hadoop.io.WritableComparable)":{"name":"headMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"boolean containsValue(java.lang.Object)":{"name":"containsValue","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void putAll(java.util.Map)":{"name":"putAll","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.lang.Object put(java.lang.Object, java.lang.Object)":{"name":"put","returnType":"java.lang.Object","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"java.lang.Object firstKey()":{"name":"firstKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JType":{"name":"org.apache.hadoop.record.compiler.JType","methods":{}},"org.apache.hadoop.util.Tool":{"name":"org.apache.hadoop.util.Tool","methods":{"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.record.compiler.JField":{"name":"org.apache.hadoop.record.compiler.JField","methods":{}},"org.apache.hadoop.record.compiler.CodeBuffer":{"name":"org.apache.hadoop.record.compiler.CodeBuffer","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.record.meta.MapTypeID":{"name":"org.apache.hadoop.record.meta.MapTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getValueTypeID()":{"name":"getValueTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID 
getKeyTypeID()":{"name":"getKeyTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.log.metrics.EventCounter":{"name":"org.apache.hadoop.log.metrics.EventCounter","methods":{"void append(org.apache.log4j.spi.LoggingEvent)":{"name":"append","returnType":"void","args":["org.apache.log4j.spi.LoggingEvent"],"exceptions":[]},"long getFatal()":{"name":"getFatal","returnType":"long","args":[],"exceptions":[]},"long getWarn()":{"name":"getWarn","returnType":"long","args":[],"exceptions":[]},"long getError()":{"name":"getError","returnType":"long","args":[],"exceptions":[]},"long getInfo()":{"name":"getInfo","returnType":"long","args":[],"exceptions":[]},"void close()":{"name":"close","returnType":"void","args":[],"exceptions":[]},"boolean requiresLayout()":{"name":"requiresLayout","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.fs.permission.AccessControlException":{"name":"org.apache.hadoop.fs.permission.AccessControlException","methods":{}},"org.apache.hadoop.record.compiler.JByte":{"name":"org.apache.hadoop.record.compiler.JByte","methods":{}},"org.apache.hadoop.io.ArrayFile":{"name":"org.apache.hadoop.io.ArrayFile","methods":{}},"org.apache.hadoop.record.compiler.generated.SimpleCharStream":{"name":"org.apache.hadoop.record.compiler.generated.SimpleCharStream","methods":{"void ReInit(java.io.InputStream)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"void ReInit(java.io.InputStream, int, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","int","int","int"],"exceptions":[]},"void ReInit(java.io.InputStream, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","int","int"],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String, int, int) throws 
java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String","int","int"],"exceptions":["java.io.UnsupportedEncodingException"]},"java.lang.String GetImage()":{"name":"GetImage","returnType":"java.lang.String","args":[],"exceptions":[]},"void Done()":{"name":"Done","returnType":"void","args":[],"exceptions":[]},"void adjustBeginLineColumn(int, int)":{"name":"adjustBeginLineColumn","returnType":"void","args":["int","int"],"exceptions":[]},"int getEndColumn()":{"name":"getEndColumn","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String) throws java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":["java.io.UnsupportedEncodingException"]},"void ReInit(java.io.Reader, int, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.Reader","int","int","int"],"exceptions":[]},"[C GetSuffix(int)":{"name":"GetSuffix","returnType":"[C","args":["int"],"exceptions":[]},"int getBeginLine()":{"name":"getBeginLine","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.Reader, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.Reader","int","int"],"exceptions":[]},"int getEndLine()":{"name":"getEndLine","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String, int, int, int) throws java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String","int","int","int"],"exceptions":["java.io.UnsupportedEncodingException"]},"int getBeginColumn()":{"name":"getBeginColumn","returnType":"int","args":[],"exceptions":[]},"char BeginToken() throws java.io.IOException":{"name":"BeginToken","returnType":"char","args":[],"exceptions":["java.io.IOException"]},"char readChar() throws 
java.io.IOException":{"name":"readChar","returnType":"char","args":[],"exceptions":["java.io.IOException"]},"void backup(int)":{"name":"backup","returnType":"void","args":["int"],"exceptions":[]},"void ReInit(java.io.Reader)":{"name":"ReInit","returnType":"void","args":["java.io.Reader"],"exceptions":[]}}},"org.apache.hadoop.io.EnumSetWritable":{"name":"org.apache.hadoop.io.EnumSetWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void set(java.util.EnumSet, java.lang.Class)":{"name":"set","returnType":"void","args":["java.util.EnumSet","java.lang.Class"],"exceptions":[]},"boolean add(java.lang.Enum)":{"name":"add","returnType":"boolean","args":["java.lang.Enum"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean add(java.lang.Object)":{"name":"add","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.util.EnumSet get()":{"name":"get","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Class getElementType()":{"name":"getElementType","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.PathFilter":{"name":"org.apache.hadoop.fs.PathFilter","methods":{"boolean accept(org.apache.hadoop.fs.Path)":{"name":"accept","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.io.BinaryComparable":{"name":"org.apache.hadoop.io.BinaryComparable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.BinaryComparable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.BinaryComparable"],"exceptions":[]},"int compareTo([B, int, int)":{"name":"compareTo","returnType":"int","args":["[B","int","int"],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.net.ConnectTimeoutException":{"name":"org.apache.hadoop.net.ConnectTimeoutException","methods":{}},"org.apache.hadoop.io.MapWritable":{"name":"org.apache.hadoop.io.MapWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"boolean containsKey(java.lang.Object)":{"name":"containsKey","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object remove(java.lang.Object)":{"name":"remove","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object get(java.lang.Object)":{"name":"get","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable put(org.apache.hadoop.io.Writable, 
org.apache.hadoop.io.Writable)":{"name":"put","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.io.Writable"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"boolean containsValue(java.lang.Object)":{"name":"containsValue","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable remove(java.lang.Object)":{"name":"remove","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Collection values()":{"name":"values","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void putAll(java.util.Map)":{"name":"putAll","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Object put(java.lang.Object, java.lang.Object)":{"name":"put","returnType":"java.lang.Object","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"boolean isEmpty()":{"name":"isEmpty","returnType":"boolean","args":[],"exceptions":[]},"java.util.Set entrySet()":{"name":"entrySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(java.lang.Object)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Set keySet()":{"name":"keySet","returnType":"java.util.Set","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.ByteBufferPool":{"name":"org.apache.hadoop.io.ByteBufferPool","methods":{"void putBuffer(java.nio.ByteBuffer)":{"name":"putBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.nio.ByteBuffer getBuffer(boolean, int)":{"name":"getBuffer","returnType":"java.nio.ByteBuffer","args":["boolean","int"],"exceptions":[]}}},"org.apache.hadoop.io.DoubleWritable":{"name":"org.apache.hadoop.io.DoubleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"double get()":{"name":"get","returnType":"double","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.DoubleWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.DoubleWritable"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(double)":{"name":"set","returnType":"void","args":["double"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JBuffer":{"name":"org.apache.hadoop.record.compiler.JBuffer","methods":{}},"org.apache.hadoop.io.CompressedWritable":{"name":"org.apache.hadoop.io.CompressedWritable","methods":{"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.ByteWritable":{"name":"org.apache.hadoop.io.ByteWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte get()":{"name":"get","returnType":"byte","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.ByteWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ByteWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"void set(byte)":{"name":"set","returnType":"void","args":["byte"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ParentNotDirectoryException":{"name":"org.apache.hadoop.fs.ParentNotDirectoryException","methods":{}},"org.apache.hadoop.io.VersionMismatchException":{"name":"org.apache.hadoop.io.VersionMismatchException","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.io.BytesWritable":{"name":"org.apache.hadoop.io.BytesWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"[B 
get()":{"name":"get","returnType":"[B","args":[],"exceptions":[]},"int getCapacity()":{"name":"getCapacity","returnType":"int","args":[],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"void set(org.apache.hadoop.io.BytesWritable)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.BytesWritable"],"exceptions":[]},"void setCapacity(int)":{"name":"setCapacity","returnType":"void","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int getSize()":{"name":"getSize","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setSize(int)":{"name":"setSize","returnType":"void","args":["int"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.HadoopIllegalArgumentException":{"name":"org.apache.hadoop.HadoopIllegalArgumentException","methods":{}},"org.apache.hadoop.record.Record":{"name":"org.apache.hadoop.record.Record","methods":{"void deserialize(org.apache.hadoop.record.RecordInput) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput"],"exceptions":["java.io.IOException"]},"java.lang.String 
toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void serialize(org.apache.hadoop.record.RecordOutput) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput"],"exceptions":["java.io.IOException"]},"void serialize(org.apache.hadoop.record.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.RecordOutput":{"name":"org.apache.hadoop.record.RecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws 
java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, 
java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Closeable":{"name":"org.apache.hadoop.io.Closeable","methods":{}},"org.apache.hadoop.io.WritableFactory":{"name":"org.apache.hadoop.io.WritableFactory","methods":{"org.apache.hadoop.io.Writable newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.record.CsvRecordInput":{"name":"org.apache.hadoop.record.CsvRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws 
java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.ToolRunner":{"name":"org.apache.hadoop.util.ToolRunner","methods":{"void printGenericCommandUsage(java.io.PrintStream)":{"name":"printGenericCommandUsage","returnType":"void","args":["java.io.PrintStream"],"exceptions":[]},"boolean confirmPrompt(java.lang.String) throws java.io.IOException":{"name":"confirmPrompt","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int run(org.apache.hadoop.util.Tool, [Ljava.lang.String;) throws 
java.lang.Exception":{"name":"run","returnType":"int","args":["org.apache.hadoop.util.Tool","[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"int run(org.apache.hadoop.conf.Configuration, org.apache.hadoop.util.Tool, [Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.util.Tool","[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.tracing.SpanReceiverInfo":{"name":"org.apache.hadoop.tracing.SpanReceiverInfo","methods":{"long getId()":{"name":"getId","returnType":"long","args":[],"exceptions":[]},"java.lang.String getClassName()":{"name":"getClassName","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.util.bloom.RemoveScheme":{"name":"org.apache.hadoop.util.bloom.RemoveScheme","methods":{}},"org.apache.hadoop.record.Index":{"name":"org.apache.hadoop.record.Index","methods":{"boolean done()":{"name":"done","returnType":"boolean","args":[],"exceptions":[]},"void incr()":{"name":"incr","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.io.RawComparator":{"name":"org.apache.hadoop.io.RawComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]}}},"org.apache.hadoop.io.MD5Hash":{"name":"org.apache.hadoop.io.MD5Hash","methods":{"void set(org.apache.hadoop.io.MD5Hash)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.MD5Hash"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.io.MD5Hash read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.MD5Hash 
digest(java.lang.String)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.lang.String"],"exceptions":[]},"java.security.MessageDigest getDigester()":{"name":"getDigester","returnType":"java.security.MessageDigest","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest([B)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["[B"],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest(java.io.InputStream) throws java.io.IOException":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.io.InputStream"],"exceptions":["java.io.IOException"]},"void setDigest(java.lang.String)":{"name":"setDigest","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.MD5Hash digest(org.apache.hadoop.io.UTF8)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["org.apache.hadoop.io.UTF8"],"exceptions":[]},"int quarterDigest()":{"name":"quarterDigest","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.MD5Hash)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.MD5Hash"],"exceptions":[]},"long halfDigest()":{"name":"halfDigest","returnType":"long","args":[],"exceptions":[]},"[B getDigest()":{"name":"getDigest","returnType":"[B","args":[],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest([B, int, int)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["[B","int","int"],"exceptions":[]},"void 
readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JMap":{"name":"org.apache.hadoop.record.compiler.JMap","methods":{}},"org.apache.hadoop.io.VIntWritable":{"name":"org.apache.hadoop.io.VIntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VIntWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VIntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.LocalFileSystem":{"name":"org.apache.hadoop.fs.LocalFileSystem","methods":{"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws 
java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.io.File pathToFile(org.apache.hadoop.fs.Path)":{"name":"pathToFile","returnType":"java.io.File","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean reportChecksumFailure(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FSDataInputStream, long, org.apache.hadoop.fs.FSDataInputStream, long)":{"name":"reportChecksumFailure","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FSDataInputStream","long","org.apache.hadoop.fs.FSDataInputStream","long"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem 
getRaw()":{"name":"getRaw","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]}}},"org.apache.hadoop.record.CsvRecordOutput":{"name":"org.apache.hadoop.record.CsvRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws 
java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableUtils":{"name":"org.apache.hadoop.io.WritableUtils","methods":{"int readVIntInRange(java.io.DataInput, int, int) throws java.io.IOException":{"name":"readVIntInRange","returnType":"int","args":["java.io.DataInput","int","int"],"exceptions":["java.io.IOException"]},"void writeVInt(java.io.DataOutput, int) throws java.io.IOException":{"name":"writeVInt","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; readStringArray(java.io.DataInput) throws java.io.IOException":{"name":"readStringArray","returnType":"[Ljava.lang.String;","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void cloneInto(org.apache.hadoop.io.Writable, org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"cloneInto","returnType":"void","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int getVIntSize(long)":{"name":"getVIntSize","returnType":"int","args":["long"],"exceptions":[]},"[B readCompressedByteArray(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedByteArray","returnType":"[B","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeCompressedStringArray(java.io.DataOutput, [Ljava.lang.String;) throws java.io.IOException":{"name":"writeCompressedStringArray","returnType":"void","args":["java.io.DataOutput","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void writeStringArray(java.io.DataOutput, [Ljava.lang.String;) throws java.io.IOException":{"name":"writeStringArray","returnType":"void","args":["java.io.DataOutput","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void writeString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void displayByteArray([B)":{"name":"displayByteArray","returnType":"void","args":["[B"],"exceptions":[]},"int writeCompressedString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeCompressedString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; readCompressedStringArray(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedStringArray","returnType":"[Ljava.lang.String;","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"boolean isNegativeVInt(byte)":{"name":"isNegativeVInt","returnType":"boolean","args":["byte"],"exceptions":[]},"org.apache.hadoop.io.Writable clone(org.apache.hadoop.io.Writable, 
org.apache.hadoop.conf.Configuration)":{"name":"clone","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int decodeVIntSize(byte)":{"name":"decodeVIntSize","returnType":"int","args":["byte"],"exceptions":[]},"int readVInt(java.io.DataInput) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int writeCompressedByteArray(java.io.DataOutput, [B) throws java.io.IOException":{"name":"writeCompressedByteArray","returnType":"int","args":["java.io.DataOutput","[B"],"exceptions":["java.io.IOException"]},"void writeEnum(java.io.DataOutput, java.lang.Enum) throws java.io.IOException":{"name":"writeEnum","returnType":"void","args":["java.io.DataOutput","java.lang.Enum"],"exceptions":["java.io.IOException"]},"[B toByteArray([Lorg.apache.hadoop.io.Writable;)":{"name":"toByteArray","returnType":"[B","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.String readString(java.io.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String readStringSafely(java.io.DataInput, int) throws java.io.IOException, java.lang.IllegalArgumentException":{"name":"readStringSafely","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException","java.lang.IllegalArgumentException"]},"java.lang.Enum readEnum(java.io.DataInput, java.lang.Class) throws java.io.IOException":{"name":"readEnum","returnType":"java.lang.Enum","args":["java.io.DataInput","java.lang.Class"],"exceptions":["java.io.IOException"]},"long readVLong(java.io.DataInput) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void skipCompressedByteArray(java.io.DataInput) throws 
java.io.IOException":{"name":"skipCompressedByteArray","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void skipFully(java.io.DataInput, int) throws java.io.IOException":{"name":"skipFully","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readCompressedString(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeVLong(java.io.DataOutput, long) throws java.io.IOException":{"name":"writeVLong","returnType":"void","args":["java.io.DataOutput","long"],"exceptions":["java.io.IOException"]}}}}} \ No newline at end of file diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list new file mode 100644 index 00000000..ab6cd511 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list @@ -0,0 +1,2 @@ +rcc +hadoop diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list new file mode 100644 index 00000000..2edbd0f9 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list @@ -0,0 +1,60 @@ +api-util-1\.0\.0-M20[\.\-_].*jar +curator-recipes-2\.7\.1[\.\-_].*jar +curator-framework-2\.7\.1[\.\-_].*jar +netty-3\.6\.2\.Final[\.\-_].*jar +gson-2\.2\.4[\.\-_].*jar +paranamer-2\.3[\.\-_].*jar +jackson-core-asl-1\.9\.13[\.\-_].*jar +jackson-xc-1\.9\.13[\.\-_].*jar +jersey-server-1\.9[\.\-_].*jar +stax-api-1\.0-2[\.\-_].*jar +zookeeper-3\.4\.6[\.\-_].*jar +htrace-core-3\.1\.0-incubating[\.\-_].*jar +slf4j-api-1\.7\.10[\.\-_].*jar +avro-1\.7\.[4-7][\.\-_].*jar +slf4j-log4j12-1\.7\.10[\.\-_].*jar +curator-client-2\.7\.1[\.\-_].*jar 
+jets3t-0\.9\.0[\.\-_].*jar +commons-net-3\.1[\.\-_].*jar +jaxb-impl-2\.2\.3-1[\.\-_].*jar +httpclient-4\.[0-9]\.[0-9][\.\-_].*jar +apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar +commons-cli-1\.2[\.\-_].*jar +log4j-1\.2\.17[\.\-_].*jar +jackson-mapper-asl-1\.9\.13[\.\-_].*jar +java-xmlbuilder-0\.4[\.\-_].*jar +jsp-api-2\.1[\.\-_].*jar +guava-11\.0\.2[\.\-_].*jar +jetty-6\.1\.26[\.\-_].*jar +commons-logging-1\.1\.3[\.\-_].*jar +snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +commons-httpclient-3\.1[\.\-_].*jar +jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar +jersey-core-1\.9[\.\-_].*jar +commons-compress-1\.4\.1[\.\-_].*jar +jettison-1\.1[\.\-_].*jar +junit-4\.11[\.\-_].*jar +commons-collections-3\.2\.[12][\.\-_].*jar +xz-1\.0[\.\-_].*jar +asm-3\.2[\.\-_].*jar +commons-codec-1\.4[\.\-_].*jar +commons-digester-1\.8[\.\-_].*jar +api-asn1-api-1\.0\.0-M20[\.\-_].*jar +xmlenc-0\.52[\.\-_].*jar +commons-configuration-1\.6[\.\-_].*jar +mockito-all-1\.8\.5[\.\-_].*jar +commons-lang-2\.6[\.\-_].*jar +jetty-util-6\.1\.26[\.\-_].*jar +jsr305-3\.0\.0[\.\-_].*jar +protobuf-java-2\.5\.0[\.\-_].*jar +httpcore-4\.[0-9]\.[0-9][\.\-_].*jar +commons-io-2\.4[\.\-_].*jar +activation-1\.1[\.\-_].*jar +jersey-json-1\.9[\.\-_].*jar +jaxb-api-2\.2\.2[\.\-_].*jar +commons-math3-3\.1\.1[\.\-_].*jar +hamcrest-core-1\.3[\.\-_].*jar +commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar +apacheds-i18n-2\.0\.0-M15[\.\-_].*jar +servlet-api-2\.5[\.\-_].*jar +jackson-jaxrs-1\.9\.13[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list new file mode 100644 index 00000000..73ff182a --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list @@ -0,0 +1,230 @@ +bin +bin/rcc +bin/hadoop +sbin +sbin/hadoop-daemons\.sh +sbin/hadoop-daemon\.sh +sbin/slaves\.sh +hadoop-annotations-2\.7\.[0-9][\.\-_].*jar +hadoop-common-2\.7\.[0-9][\.\-_].*jar 
+hadoop-annotations[\.\-_].*jar +hadoop-common-2\.7\.[0-9].*-tests\.jar +etc +etc/hadoop +hadoop-common[\.\-_].*jar +hadoop-auth-2\.7\.[0-9][\.\-_].*jar +libexec +libexec/hdfs-config\.sh +libexec/hadoop-layout\.sh +libexec/yarn-config\.sh +libexec/mapred-config\.sh +libexec/hadoop-config\.sh +libexec/init-hdfs\.sh +hadoop-auth[\.\-_].*jar +hadoop-nfs[\.\-_].*jar +hadoop-nfs-2\.7\.[0-9][\.\-_].*jar +client +client/curator-recipes[\.\-_].*jar +client/curator-recipes-2\.7\.1[\.\-_].*jar +client/commons-configuration[\.\-_].*jar +client/jsr305[\.\-_].*jar +client/slf4j-log4j12[\.\-_].*jar +client/hadoop-mapreduce-client-core[\.\-_].*jar +client/hadoop-hdfs[\.\-_].*jar +client/commons-configuration-1\.6[\.\-_].*jar +client/commons-cli-1\.2[\.\-_].*jar +client/hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar +client/commons-digester-1\.8[\.\-_].*jar +client/curator-client-2\.7\.1[\.\-_].*jar +client/httpclient[\.\-_].*jar +client/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar +client/jsp-api-2\.1[\.\-_].*jar +client/leveldbjni-all-1\.8[\.\-_].*jar +client/slf4j-api-1\.7\.10[\.\-_].*jar +client/hadoop-annotations-2\.7\.[0-9][\.\-_].*jar +client/jersey-core[\.\-_].*jar +client/commons-compress[\.\-_].*jar +client/stax-api[\.\-_].*jar +client/jaxb-api-2\.2\.2[\.\-_].*jar +client/api-util-1\.0\.0-M20[\.\-_].*jar +client/jackson-xc[\.\-_].*jar +client/commons-cli[\.\-_].*jar +client/xml-apis[\.\-_].*jar +client/curator-client[\.\-_].*jar +client/curator-framework-2\.7\.1[\.\-_].*jar +client/commons-io-2\.4[\.\-_].*jar +client/jackson-core-asl[\.\-_].*jar +client/avro[\.\-_].*jar +client/hadoop-mapreduce-client-app[\.\-_].*jar +client/jetty-util[\.\-_].*jar +client/guava[\.\-_].*jar +client/commons-beanutils[\.\-_].*jar +client/apacheds-i18n[\.\-_].*jar +client/jetty-util-6\.1\.26[\.\-_].*jar +client/xercesImpl-2\.9\.1[\.\-_].*jar +client/commons-logging[\.\-_].*jar +client/slf4j-api[\.\-_].*jar +client/commons-digester[\.\-_].*jar +client/avro-1\.7\.[4-7][\.\-_].*jar 
+client/hadoop-common-2\.7\.[0-9][\.\-_].*jar +client/commons-math3[\.\-_].*jar +client/hadoop-yarn-common-2\.7\.[0-9][\.\-_].*jar +client/hadoop-annotations[\.\-_].*jar +client/xercesImpl[\.\-_].*jar +client/commons-codec[\.\-_].*jar +client/netty-3\.6\.2\.Final[\.\-_].*jar +client/commons-collections[\.\-_].*jar +client/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar +client/hadoop-mapreduce-client-jobclient[\.\-_].*jar +client/htrace-core[\.\-_].*jar +client/jersey-core-1\.9[\.\-_].*jar +client/xz[\.\-_].*jar +client/jackson-mapper-asl-1\.9\.13[\.\-_].*jar +client/jsp-api[\.\-_].*jar +client/commons-httpclient[\.\-_].*jar +client/netty[\.\-_].*jar +client/hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar +client/commons-net[\.\-_].*jar +client/hadoop-yarn-server-common[\.\-_].*jar +client/jaxb-api[\.\-_].*jar +client/apacheds-kerberos-codec[\.\-_].*jar +client/httpcore[\.\-_].*jar +client/hadoop-yarn-server-common-2\.7\.[0-9][\.\-_].*jar +client/hadoop-common[\.\-_].*jar +client/leveldbjni-all[\.\-_].*jar +client/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +client/gson-2\.2\.4[\.\-_].*jar +client/commons-net-3\.1[\.\-_].*jar +client/api-util[\.\-_].*jar +client/commons-compress-1\.4\.1[\.\-_].*jar +client/jackson-xc-1\.9\.13[\.\-_].*jar +client/netty-all-4\.0\.23\.Final[\.\-_].*jar +client/xmlenc-0\.52[\.\-_].*jar +client/jackson-jaxrs[\.\-_].*jar +client/api-asn1-api[\.\-_].*jar +client/api-asn1-api-1\.0\.0-M20[\.\-_].*jar +client/commons-codec-1\.4[\.\-_].*jar +client/jackson-core-asl-1\.9\.13[\.\-_].*jar +client/servlet-api-2\.5[\.\-_].*jar +client/commons-beanutils(-core)?[\.\-_].*jar +client/paranamer-2\.3[\.\-_].*jar +client/hadoop-yarn-api-2\.7\.[0-9][\.\-_].*jar +client/hadoop-mapreduce-client-shuffle[\.\-_].*jar +client/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar +client/hadoop-yarn-common[\.\-_].*jar +client/hadoop-auth-2\.7\.[0-9][\.\-_].*jar +client/snappy-java[\.\-_].*jar +client/gson[\.\-_].*jar +client/xml-apis-1\.3\.04[\.\-_].*jar 
+client/commons-io[\.\-_].*jar +client/commons-math3-3\.1\.1[\.\-_].*jar +client/log4j[\.\-_].*jar +client/hadoop-auth[\.\-_].*jar +client/log4j-1\.2\.17[\.\-_].*jar +client/servlet-api[\.\-_].*jar +client/hadoop-hdfs-2\.7\.[0-9][\.\-_].*jar +client/activation[\.\-_].*jar +client/zookeeper[\.\-_].*jar +client/xmlenc[\.\-_].*jar +client/stax-api-1\.0-2[\.\-_].*jar +client/hadoop-yarn-client-2\.7\.[0-9][\.\-_].*jar +client/jersey-client-1\.9[\.\-_].*jar +client/hadoop-mapreduce-client-common[\.\-_].*jar +client/xz-1\.0[\.\-_].*jar +client/zookeeper-3\.4\.6[\.\-_].*jar +client/activation-1\.1[\.\-_].*jar +client/hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar +client/htrace-core-3\.1\.0-incubating[\.\-_].*jar +client/protobuf-java-2\.5\.0[\.\-_].*jar +client/hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar +client/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar +client/commons-lang[\.\-_].*jar +client/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar +client/paranamer[\.\-_].*jar +client/hadoop-yarn-api[\.\-_].*jar +client/jersey-client[\.\-_].*jar +client/hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar +client/curator-framework[\.\-_].*jar +client/guava-11\.0\.2[\.\-_].*jar +client/jsr305-3\.0\.0[\.\-_].*jar +client/hadoop-yarn-client[\.\-_].*jar +client/jackson-jaxrs-1\.9\.13[\.\-_].*jar +client/commons-httpclient-3\.1[\.\-_].*jar +client/commons-collections-3\.2\.[12][\.\-_].*jar +client/netty-all[\.\-_].*jar +client/slf4j-log4j12-1\.7\.10[\.\-_].*jar +client/protobuf-java[\.\-_].*jar +client/jackson-mapper-asl[\.\-_].*jar +client/commons-logging-1\.1\.3[\.\-_].*jar +client/commons-lang-2\.6[\.\-_].*jar +lib +lib/curator-recipes-2\.7\.1[\.\-_].*jar +lib/commons-configuration-1\.6[\.\-_].*jar +lib/commons-cli-1\.2[\.\-_].*jar +lib/commons-digester-1\.8[\.\-_].*jar +lib/curator-client-2\.7\.1[\.\-_].*jar +lib/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar +lib/jsp-api-2\.1[\.\-_].*jar +lib/jets3t-0\.9\.0[\.\-_].*jar +lib/slf4j-api-1\.7\.10[\.\-_].*jar 
+lib/jaxb-api-2\.2\.2[\.\-_].*jar +lib/api-util-1\.0\.0-M20[\.\-_].*jar +lib/jettison-1\.1[\.\-_].*jar +lib/curator-framework-2\.7\.1[\.\-_].*jar +lib/commons-io-2\.4[\.\-_].*jar +lib/jetty-util-6\.1\.26[\.\-_].*jar +lib/avro-1\.7\.[4-7][\.\-_].*jar +lib/jaxb-impl-2\.2\.3-1[\.\-_].*jar +lib/netty-3\.6\.2\.Final[\.\-_].*jar +lib/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar +lib/jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar +lib/jersey-core-1\.9[\.\-_].*jar +lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar +lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +lib/gson-2\.2\.4[\.\-_].*jar +lib/commons-net-3\.1[\.\-_].*jar +lib/asm-3\.2[\.\-_].*jar +lib/commons-compress-1\.4\.1[\.\-_].*jar +lib/mockito-all-1\.8\.5[\.\-_].*jar +lib/jackson-xc-1\.9\.13[\.\-_].*jar +lib/junit-4\.11[\.\-_].*jar +lib/jersey-json-1\.9[\.\-_].*jar +lib/xmlenc-0\.52[\.\-_].*jar +lib/api-asn1-api-1\.0\.0-M20[\.\-_].*jar +lib/commons-codec-1\.4[\.\-_].*jar +lib/jackson-core-asl-1\.9\.13[\.\-_].*jar +lib/servlet-api-2\.5[\.\-_].*jar +lib/paranamer-2\.3[\.\-_].*jar +lib/native +lib/native/libhadoop\.a +lib/native/libhadoop\.so +lib/native/libhdfs\.a +lib/native/libsnappy\.so[.0-9]* +lib/native/libsnappy\.so +lib/native/libhadoop\.so[.0-9]* +lib/native/libhadooputils\.a +lib/native/libsnappy\.so[.0-9]* +lib/native/libhadooppipes\.a +lib/jetty-6\.1\.26[\.\-_].*jar +lib/jersey-server-1\.9[\.\-_].*jar +lib/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar +lib/commons-math3-3\.1\.1[\.\-_].*jar +lib/log4j-1\.2\.17[\.\-_].*jar +lib/hamcrest-core-1\.3[\.\-_].*jar +lib/stax-api-1\.0-2[\.\-_].*jar +lib/xz-1\.0[\.\-_].*jar +lib/zookeeper-3\.4\.6[\.\-_].*jar +lib/activation-1\.1[\.\-_].*jar +lib/htrace-core-3\.1\.0-incubating[\.\-_].*jar +lib/protobuf-java-2\.5\.0[\.\-_].*jar +lib/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar +lib/java-xmlbuilder-0\.4[\.\-_].*jar +lib/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar +lib/guava-11\.0\.2[\.\-_].*jar +lib/jsr305-3\.0\.0[\.\-_].*jar +lib/jackson-jaxrs-1\.9\.13[\.\-_].*jar 
+lib/commons-httpclient-3\.1[\.\-_].*jar +lib/commons-collections-3\.2\.[12][\.\-_].*jar +lib/slf4j-log4j12-1\.7\.10[\.\-_].*jar +lib/commons-logging-1\.1\.3[\.\-_].*jar +lib/commons-lang-2\.6[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json new file mode 100644 index 00000000..b5e22655 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-hdfs","version":"2.7.3","classes":{"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean","methods":{"long getTotal()":{"name":"getTotal","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDeadNodes()":{"name":"getDeadNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"int getDistinctVersionCount()":{"name":"getDistinctVersionCount","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean getRollingUpgradeStatus()":{"name":"getRollingUpgradeStatus","returnType":"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Map getDistinctVersions()":{"name":"getDistinctVersions","returnType":"java.util.Map","args":[],"exceptions":[]},"int getThreads()":{"name":"getThreads","returnType":"int","args":[],"exceptions":[]},"java.lang.String getJournalTransactionInfo()":{"name":"getJournalTransactionInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentBlockPoolUsed()":{"name":"getPercentBlockPoolUsed","returnType":"float","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String 
getLiveNodes()":{"name":"getLiveNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockPoolUsedSpace()":{"name":"getBlockPoolUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSafemode()":{"name":"getSafemode","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCorruptFiles()":{"name":"getCorruptFiles","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getSoftwareVersion()":{"name":"getSoftwareVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"long getTotalFiles()":{"name":"getTotalFiles","returnType":"long","args":[],"exceptions":[]},"long getCacheUsed()":{"name":"getCacheUsed","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameDirStatuses()":{"name":"getNameDirStatuses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCompileInfo()":{"name":"getCompileInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNodeUsage()":{"name":"getNodeUsage","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNumberOfMissingBlocksWithReplicationFactorOne()":{"name":"getNumberOfMissingBlocksWithReplicationFactorOne","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameJournalStatus()":{"name":"getNameJournalStatus","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNonDfsUsedSpace()":{"name":"getNonDfsUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNNStarted()":{"name":"getNNStarted","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentRemaining()":{"name":"getPercentRemaining","returnType":"float","args":[],"exceptions":[]},"boolean isUpgradeFinalized()":{"name":"isUpgradeFinalized","returnType":"boolean","args":[],"exceptions":[]},"long getTotalBlocks()":{"name":"getTotalBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String 
getBlockPoolId()":{"name":"getBlockPoolId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getNumberOfMissingBlocks()":{"name":"getNumberOfMissingBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDecomNodes()":{"name":"getDecomNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getFree()":{"name":"getFree","returnType":"long","args":[],"exceptions":[]},"float getPercentUsed()":{"name":"getPercentUsed","returnType":"float","args":[],"exceptions":[]},"long getCacheCapacity()":{"name":"getCacheCapacity","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean","methods":{"java.util.Map getDatanodeNetworkCounts()":{"name":"getDatanodeNetworkCounts","returnType":"java.util.Map","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"int getXceiverCount()":{"name":"getXceiverCount","returnType":"int","args":[],"exceptions":[]},"java.lang.String getHttpPort()":{"name":"getHttpPort","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNamenodeAddresses()":{"name":"getNamenodeAddresses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVolumeInfo()":{"name":"getVolumeInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getRpcPort()":{"name":"getRpcPort","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.UnknownCipherSuiteException":{"name":"org.apache.hadoop.hdfs.UnknownCipherSuiteException","methods":{}}}} \ No newline at end of file diff --git 
a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list new file mode 100644 index 00000000..88879870 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list @@ -0,0 +1 @@ +hdfs diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list new file mode 100644 index 00000000..8355c581 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list @@ -0,0 +1,25 @@ +netty-3\.6\.2\.Final[\.\-_].*jar +leveldbjni-all-1\.8[\.\-_].*jar +jackson-core-asl-1\.9\.13[\.\-_].*jar +jersey-server-1\.9[\.\-_].*jar +htrace-core-3\.1\.0-incubating[\.\-_].*jar +commons-daemon-1\.0\.13[\.\-_].*jar +commons-cli-1\.2[\.\-_].*jar +log4j-1\.2\.17[\.\-_].*jar +jackson-mapper-asl-1\.9\.13[\.\-_].*jar +guava-11\.0\.2[\.\-_].*jar +jetty-6\.1\.26[\.\-_].*jar +commons-logging-1\.1\.3[\.\-_].*jar +jersey-core-1\.9[\.\-_].*jar +asm-3\.2[\.\-_].*jar +commons-codec-1\.4[\.\-_].*jar +xml-apis-1\.3\.04[\.\-_].*jar +xercesImpl-2\.9\.1[\.\-_].*jar +xmlenc-0\.52[\.\-_].*jar +commons-lang-2\.6[\.\-_].*jar +netty-all-4\.0\.23\.Final[\.\-_].*jar +jetty-util-6\.1\.26[\.\-_].*jar +jsr305-3\.0\.0[\.\-_].*jar +protobuf-java-2\.5\.0[\.\-_].*jar +commons-io-2\.4[\.\-_].*jar +servlet-api-2\.5[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list new file mode 100644 index 00000000..12565fd2 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list @@ -0,0 +1,79 @@ +webapps +webapps/journal +webapps/journal/index\.html +webapps/journal/WEB-INF +webapps/journal/WEB-INF/web\.xml +webapps/secondary +webapps/secondary/index\.html +webapps/secondary/status\.html 
+webapps/secondary/WEB-INF +webapps/secondary/WEB-INF/web\.xml +webapps/secondary/snn\.js +webapps/hdfs +webapps/hdfs/dfshealth\.html +webapps/hdfs/index\.html +webapps/hdfs/explorer\.js +webapps/hdfs/dfshealth\.js +webapps/hdfs/WEB-INF +webapps/hdfs/WEB-INF/web\.xml +webapps/hdfs/explorer\.html +webapps/datanode +webapps/datanode/index\.html +webapps/datanode/robots\.txt +webapps/datanode/WEB-INF +webapps/datanode/WEB-INF/web\.xml +webapps/nfs3 +webapps/nfs3/WEB-INF +webapps/nfs3/WEB-INF/web\.xml +webapps/static +webapps/static/hadoop\.css +webapps/static/bootstrap-3\.0\.2 +webapps/static/bootstrap-3\.0\.2/fonts +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.svg +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.eot +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.woff +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.ttf +webapps/static/bootstrap-3\.0\.2/css +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.css +webapps/static/bootstrap-3\.0\.2/js +webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.js +webapps/static/jquery-1\.10\.([2-9]|[3-9]\d+).*\.js +webapps/static/dust-helpers-1\.1\.([1-9]|[2-9]\d+).*\.js +webapps/static/dust-full-2\.0\.\d+.*\.js +webapps/static/dfs-dust\.js +hadoop-hdfs\.jar +bin +bin/hdfs +sbin +sbin/distribute-exclude\.sh +sbin/refresh-namenodes\.sh +hadoop-hdfs-nfs-2\.7\.([1-9]|[2-9]\d+).*\.jar +hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar +hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar +hadoop-hdfs-nfs\.jar +lib +lib/commons-daemon-1\.0\.(1[3-9]|[2-9]\d).*\.jar +lib/commons-cli-1\.([2-9]|[3-9]\d+).*\.jar +lib/leveldbjni-all-1\.([8-9]|[9-9]\d+).*\.jar +lib/commons-io-2\.([4-9]|[5-9]\d+).*\.jar +lib/jetty-util-6\.1\.(2[6-9]|[3-9]\d).*\.jar +lib/xercesImpl-2\.9\.([1-9]|[2-9]\d+).*\.jar +lib/netty-3\.6\.([2-9]|[3-9]\d+).*\.jar +lib/jersey-core-1\.(9|[1-9]\d+).*\.jar +lib/jackson-mapper-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar +lib/asm-3\.([2-9]|[3-9]\d+).*\.jar +lib/netty-all-4\.0\.(2[3-9]|[3-9]\d).*\.jar 
+lib/xmlenc-0\.(5[2-9]|[6-9]\d).*\.jar +lib/commons-codec-1\.([4-9]|[5-9]\d+).*\.jar +lib/jackson-core-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar +lib/servlet-api-2\.([5-9]|[6-9]\d+).*\.jar +lib/jetty-6\.1\.(2[6-9]|[3-9]\d).*\.jar +lib/jersey-server-1\.(9|[1-9]\d+).*\.jar +lib/xml-apis-1\.3\.(0[4-9]|[1-9]\d).*\.jar +lib/log4j-1\.2\.(1[7-9]|[2-9]\d).*\.jar +lib/htrace-core-3\.1\.\d+.*\.jar +lib/protobuf-java-2\.5\.\d+.*\.jar +lib/guava-11\.0\.([2-9]|[3-9]\d+).*\.jar +lib/jsr305-3\.0\.\d+.*\.jar +lib/commons-logging-1\.1\.([3-9]|[4-9]\d+).*\.jar +lib/commons-lang-2\.([6-9]|[7-9]\d+).*\.jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list new file mode 100644 index 00000000..0a7a9c57 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list @@ -0,0 +1 @@ +mapred diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json new file mode 100644 index 00000000..6061c5ea --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-mapreduce-client-core","version":"2.7.3","classes":{"org.apache.hadoop.mapred.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapred.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void 
setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configuration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileSplit":{"name":"org.apache.hadoop.mapred.lib.CombineFileSplit","methods":{"org.apache.hadoop.mapred.JobConf getJob()":{"name":"getJob","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.HashPartitioner":{"name":"org.apache.hadoop.mapred.lib.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputFormat":{"name":"org.apache.hadoop.mapreduce.OutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.CounterGroup":{"name":"org.apache.hadoop.mapreduce.CounterGroup","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void setAggregatorDescriptors(org.apache.hadoop.mapred.JobConf, [Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Ljava.lang.Class;"],"exceptions":[]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void main([Ljava.lang.String;) throws java.io.IOException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws 
java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.InvalidInputException":{"name":"org.apache.hadoop.mapreduce.lib.input.InvalidInputException","methods":{"java.util.List getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters$Counter":{"name":"org.apache.hadoop.mapred.Counters$Counter","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean 
contentEquals(org.apache.hadoop.mapred.Counters$Counter)":{"name":"contentEquals","returnType":"boolean","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getCounter()":{"name":"getCounter","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object 
createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LongSumReducer":{"name":"org.apache.hadoop.mapred.lib.LongSumReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength(int)":{"name":"getLength","returnType":"long","args":["int"],"exceptions":[]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getPaths()":{"name":"getPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long 
getOffset(int)":{"name":"getOffset","returnType":"long","args":["int"],"exceptions":[]},"org.apache.hadoop.fs.Path getPath(int)":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":["int"],"exceptions":[]},"[J getLengths()":{"name":"getLengths","returnType":"[J","args":[],"exceptions":[]},"[J getStartOffsets()":{"name":"getStartOffsets","returnType":"[J","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumPaths()":{"name":"getNumPaths","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","methods":{"java.lang.String getInputQuery()":{"name":"getInputQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInputClass(java.lang.Class)":{"name":"setInputClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setOutputFieldCount(int)":{"name":"setOutputFieldCount","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getInputTableName()":{"name":"getInputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"[Ljava.lang.String; getInputFieldNames()":{"name":"getInputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setOutputTableName(java.lang.String)":{"name":"setOutputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.sql.Connection getConnection() throws java.sql.SQLException, java.lang.ClassNotFoundException":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":["java.sql.SQLException","java.lang.ClassNotFoundException"]},"java.lang.String 
getInputBoundingQuery()":{"name":"getInputBoundingQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getInputOrderBy()":{"name":"getInputOrderBy","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getInputClass()":{"name":"getInputClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setInputTableName(java.lang.String)":{"name":"setInputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputCountQuery(java.lang.String)":{"name":"setInputCountQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputOrderBy(java.lang.String)":{"name":"setInputOrderBy","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getOutputFieldCount()":{"name":"getOutputFieldCount","returnType":"int","args":[],"exceptions":[]},"void setInputConditions(java.lang.String)":{"name":"setInputConditions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputQuery(java.lang.String)":{"name":"setInputQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getInputConditions()":{"name":"getInputConditions","returnType":"java.lang.String","args":[],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String"],"exceptions":[]},"void setInputBoundingQuery(java.lang.String)":{"name":"setInputBoundingQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setInputFieldNames([Ljava.lang.String;)":{"name":"setInputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"[Ljava.lang.String; getOutputFieldNames()":{"name":"getOutputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.String getOutputTableName()":{"name":"getOutputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setOutputFieldNames([Ljava.lang.String;)":{"name":"setOutputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"java.lang.String getInputCountQuery()":{"name":"getInputCountQuery","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.Partitioner":{"name":"org.apache.hadoop.mapred.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getCurrentValue()":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"float getProgress() throws 
java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void setKeyValue(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, [B, int, int)":{"name":"setKeyValue","returnType":"void","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text","[B","int","int"],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainMapper":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordReader":{"name":"org.apache.hadoop.mapred.RecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileOutputFormat":{"name":"org.apache.hadoop.mapred.FileOutputFormat","methods":{"void setOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getTaskOutputPath(org.apache.hadoop.mapred.JobConf, java.lang.String) throws java.io.IOException":{"name":"getTaskOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":["java.io.IOException"]},"void setOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void 
setCompressOutput(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"java.lang.String getUniqueName(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getUniqueName","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getPathForCustomFile(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getPathForCustomFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setWorkOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setWorkOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean getCompressOutput(org.apache.hadoop.mapred.JobConf)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter 
getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.conf.Configuration 
getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader) throws java.lang.InterruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.JoinRecordReader":{"name":"org.apache.hadoop.mapred.join.JoinRecordReader","methods":{"org.apache.hadoop.mapred.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapred.join.TupleWritable","args":[],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.mapred.join.TupleWritable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.mapred.join.TupleWritable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapreduce.Job, 
java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.TextOutputFormat":{"name":"org.apache.hadoop.mapred.TextOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.pipes.Submitter":{"name":"org.apache.hadoop.mapred.pipes.Submitter","methods":{"boolean getKeepCommandFile(org.apache.hadoop.mapred.JobConf)":{"name":"getKeepCommandFile","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob jobSubmit(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"jobSubmit","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setIsJavaMapper(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordWriter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean getIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordWriter","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaReducer(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaReducer","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void 
setIsJavaRecordReader(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordReader","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.String getExecutable(org.apache.hadoop.mapred.JobConf)":{"name":"getExecutable","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void setKeepCommandFile(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setKeepCommandFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaReducer(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setExecutable(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"setExecutable","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"boolean getIsJavaMapper(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaMapper","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaRecordReader(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordReader","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, 
org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.ClusterStatus":{"name":"org.apache.hadoop.mapred.ClusterStatus","methods":{"int getTaskTrackers()":{"name":"getTaskTrackers","returnType":"int","args":[],"exceptions":[]},"int getMaxMapTasks()":{"name":"getMaxMapTasks","returnType":"int","args":[],"exceptions":[]},"long getMaxMemory()":{"name":"getMaxMemory","returnType":"long","args":[],"exceptions":[]},"int getMaxReduceTasks()":{"name":"getMaxReduceTasks","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getGraylistedTrackerNames()":{"name":"getGraylistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus 
getJobTrackerStatus()":{"name":"getJobTrackerStatus","returnType":"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus","args":[],"exceptions":[]},"int getReduceTasks()":{"name":"getReduceTasks","returnType":"int","args":[],"exceptions":[]},"int getGraylistedTrackers()":{"name":"getGraylistedTrackers","returnType":"int","args":[],"exceptions":[]},"long getTTExpiryInterval()":{"name":"getTTExpiryInterval","returnType":"long","args":[],"exceptions":[]},"long getUsedMemory()":{"name":"getUsedMemory","returnType":"long","args":[],"exceptions":[]},"java.util.Collection getActiveTrackerNames()":{"name":"getActiveTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getMapTasks()":{"name":"getMapTasks","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobTracker$State getJobTrackerState()":{"name":"getJobTrackerState","returnType":"org.apache.hadoop.mapred.JobTracker$State","args":[],"exceptions":[]},"int getBlacklistedTrackers()":{"name":"getBlacklistedTrackers","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getBlacklistedTrackerNames()":{"name":"getBlacklistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.util.Collection getBlackListedTrackersInfo()":{"name":"getBlackListedTrackersInfo","returnType":"java.util.Collection","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumExcludedNodes()":{"name":"getNumExcludedNodes","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.MapReduceBase":{"name":"org.apache.hadoop.mapred.MapReduceBase","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.TupleWritable":{"name":"org.apache.hadoop.mapred.join.TupleWritable","methods":{}},"org.apache.hadoop.mapred.ID":{"name":"org.apache.hadoop.mapred.ID","methods":{}},"org.apache.hadoop.mapred.lib.RegexMapper":{"name":"org.apache.hadoop.mapred.lib.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String, long)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","args":["java.lang.String","long"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void addNextValue(double)":{"name":"addNextValue","returnType":"void","args":["double"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String 
getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"double getSum()":{"name":"getSum","returnType":"double","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputSplit":{"name":"org.apache.hadoop.mapreduce.InputSplit","methods":{"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapred.lib.TotalOrderPartitioner","methods":{"void 
setPartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.mapred.JobConf)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Counter":{"name":"org.apache.hadoop.mapreduce.Counter","methods":{"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapreduce.lib.db.DBConfiguration getDBConf()":{"name":"getDBConf","returnType":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.sql.Connection createConnection()":{"name":"createConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.sql.Connection getConnection()":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"java.lang.String getDBProductName()":{"name":"getDBProductName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapred.join.CompositeInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.String, java.lang.Class, [Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setFormat(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Partitioner":{"name":"org.apache.hadoop.mapreduce.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext)":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter 
getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent$Status":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","methods":{"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.JobContext":{"name":"org.apache.hadoop.mapred.JobContext","methods":{"org.apache.hadoop.util.Progressable getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf 
getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputCommitter":{"name":"org.apache.hadoop.mapreduce.OutputCommitter","methods":{"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws 
java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram","methods":{}},"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, 
org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.InputSplit":{"name":"org.apache.hadoop.mapred.InputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"long getSum()":{"name":"getSum","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue()":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey()":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader","methods":{"org.apache.hadoop.io.Writable 
createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration setAggregatorDescriptors([Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"org.apache.hadoop.conf.Configuration","args":["[Ljava.lang.Class;"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.InterruptedException, java.io.IOException, java.lang.ClassNotFoundException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob(org.apache.hadoop.conf.Configuration, [Ljava.lang.String;) throws 
java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMin","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueSum","methods":{}},"org.apache.hadoop.mapred.JobID":{"name":"org.apache.hadoop.mapred.JobID","methods":{"java.lang.String getJobIDsPattern(java.lang.String, java.lang.Integer)":{"name":"getJobIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapred.JobID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.JobID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID downgrade(org.apache.hadoop.mapreduce.JobID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobID","args":["org.apache.hadoop.mapreduce.JobID"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.FileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptions":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws 
java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMax","methods":{}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"java.lang.Class 
getSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.Reducer":{"name":"org.apache.hadoop.mapred.Reducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.JobControl":{"name":"org.apache.hadoop.mapred.jobcontrol.JobControl","methods":{"java.util.ArrayList getReadyJobs()":{"name":"getReadyJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getFailedJobs()":{"name":"getFailedJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getSuccessfulJobs()":{"name":"getSuccessfulJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getWaitingJobs()":{"name":"getWaitingJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getRunningJobs()":{"name":"getRunningJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addJobs(java.util.Collection)":{"name":"addJobs","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ResetableIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ResetableIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapred.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocation(int) throws java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapred.InputSplit) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.InputSplit"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapred.InputSplit","args":["int"],"exceptions":[]},"long getLength(int) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.BinaryPartitioner":{"name":"org.apache.hadoop.mapred.lib.BinaryPartitioner","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator","methods":{"void setKeyFieldComparatorOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.MultiFileSplit":{"name":"org.apache.hadoop.mapred.MultiFileSplit","methods":{"[Ljava.lang.String; getLocations() throws 
java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobQueueInfo":{"name":"org.apache.hadoop.mapred.JobQueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJobStatuses([Lorg.apache.hadoop.mapreduce.JobStatus;)":{"name":"setJobStatuses","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":[]},"void setChildren(java.util.List)":{"name":"setChildren","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getChildren()":{"name":"getChildren","returnType":"java.util.List","args":[],"exceptions":[]},"void setQueueState(java.lang.String)":{"name":"setQueueState","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getQueueState()":{"name":"getQueueState","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setProperties(java.util.Properties)":{"name":"setProperties","returnType":"void","args":["java.util.Properties"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, [Ljava.lang.String;)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws 
java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, int)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","int"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordWriter":{"name":"org.apache.hadoop.mapred.RecordWriter","methods":{"void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"close","returnType":"void","args":["org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"org.apache.hadoop.mapred.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader","methods":{"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.TupleWritable 
createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.TupleWritable":{"name":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(int)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean has(int)":{"name":"has","returnType":"boolean","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineTextInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.IdentityReducer":{"name":"org.apache.hadoop.mapred.lib.IdentityReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskID":{"name":"org.apache.hadoop.mapreduce.TaskID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType(char)":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["char"],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getAllTaskTypes()":{"name":"getAllTaskTypes","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"char getRepresentingCharacter(org.apache.hadoop.mapreduce.TaskType)":{"name":"getRepresentingCharacter","returnType":"char","args":["org.apache.hadoop.mapreduce.TaskType"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskID forName(java.lang.String) throws 
java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.filecache.DistributedCache":{"name":"org.apache.hadoop.filecache.DistributedCache","methods":{"void setLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"long getTimestamp(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getTimestamp","returnType":"long","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOException"]},"void setFileTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setFileTimestamps","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void addLocalFiles(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"addLocalFiles","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void createAllSymlink(org.apache.hadoop.conf.Configuration, java.io.File, 
java.io.File) throws java.io.IOException":{"name":"createAllSymlink","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.File","java.io.File"],"exceptions":["java.io.IOException"]},"void setArchiveTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setArchiveTimestamps","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void addLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"addLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void setLocalFiles(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalFiles","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.Job":{"name":"org.apache.hadoop.mapred.jobcontrol.Job","methods":{"java.lang.String getMapredJobID()":{"name":"getMapredJobID","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMapredJobID(java.lang.String)":{"name":"setMapredJobID","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.JobID getAssignedJobID()":{"name":"getAssignedJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"void setJobConf(org.apache.hadoop.mapred.JobConf)":{"name":"setJobConf","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.ArrayList getDependingJobs()":{"name":"getDependingJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf 
getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]},"boolean addDependingJob(org.apache.hadoop.mapred.jobcontrol.Job)":{"name":"addDependingJob","returnType":"boolean","args":["org.apache.hadoop.mapred.jobcontrol.Job"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]},"void setAssignedJobID(org.apache.hadoop.mapred.JobID)":{"name":"setAssignedJobID","returnType":"void","args":["org.apache.hadoop.mapred.JobID"],"exceptions":[]},"org.apache.hadoop.mapred.JobClient getJobClient()":{"name":"getJobClient","returnType":"org.apache.hadoop.mapred.JobClient","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.InputSampler":{"name":"org.apache.hadoop.mapreduce.lib.partition.InputSampler","methods":{"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"void writePartitionFile(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.mapreduce.lib.partition.InputSampler$Sampler) throws java.lang.InterruptedException, java.io.IOException, 
java.lang.ClassNotFoundException":{"name":"writePartitionFile","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.mapreduce.lib.partition.InputSampler$Sampler"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.mapred.lib.db.DBWritable":{"name":"org.apache.hadoop.mapred.lib.db.DBWritable","methods":{}},"org.apache.hadoop.mapreduce.lib.join.MultiFilterRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.MultiFilterRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat","methods":{"void setSequenceFileOutputValueClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputKeyClass(org.apache.hadoop.mapred.JobConf)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws 
java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapred.JobConf)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.UniqValueCount":{"name":"org.apache.hadoop.mapred.lib.aggregate.UniqValueCount","methods":{}},"org.apache.hadoop.mapred.lib.MultipleOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobStatus":{"name":"org.apache.hadoop.mapred.JobStatus","methods":{"float mapProgress()":{"name":"mapProgress","returnType":"float","args":[],"exceptions":[]},"float setupProgress()":{"name":"setupProgress","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority getJobPriority()":{"name":"getJobPriority","returnType":"org.apache.hadoop.mapred.JobPriority","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"void setRunState(int)":{"name":"setRunState","returnType":"void","args":["int"],"exceptions":[]},"float cleanupProgress()":{"name":"cleanupProgress","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobStatus downgrade(org.apache.hadoop.mapreduce.JobStatus)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobStatus","args":["org.apache.hadoop.mapreduce.JobStatus"],"exceptions":[]},"int 
getRunState()":{"name":"getRunState","returnType":"int","args":[],"exceptions":[]},"float reduceProgress()":{"name":"reduceProgress","returnType":"float","args":[],"exceptions":[]},"void setFailureInfo(java.lang.String)":{"name":"setFailureInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"void setJobPriority(org.apache.hadoop.mapred.JobPriority)":{"name":"setJobPriority","returnType":"void","args":["org.apache.hadoop.mapred.JobPriority"],"exceptions":[]},"java.lang.String getJobRunState(int)":{"name":"getJobRunState","returnType":"java.lang.String","args":["int"],"exceptions":[]},"java.lang.String getJobId()":{"name":"getJobId","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.join.ComposableInputFormat":{"name":"org.apache.hadoop.mapred.join.ComposableInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.WrappedRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.WrappedRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws 
java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.WritableComparable getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskAttemptContext":{"name":"org.apache.hadoop.mapred.TaskAttemptContext","methods":{"org.apache.hadoop.util.Progressable 
getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID getTaskAttemptID()":{"name":"getTaskAttemptID","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Reducer":{"name":"org.apache.hadoop.mapreduce.Reducer","methods":{"void run(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocation(int) throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.InputSplit) throws java.lang.InterruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long 
getLength(int) throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapreduce.InputSplit","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskID":{"name":"org.apache.hadoop.mapred.TaskID","methods":{"org.apache.hadoop.mapred.TaskID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.TaskID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"java.lang.String getTaskIDsPattern(java.lang.String, java.lang.Integer, java.lang.Boolean, java.lang.Integer)":{"name":"getTaskIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","java.lang.Boolean","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.TaskID downgrade(org.apache.hadoop.mapreduce.TaskID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskID","args":["org.apache.hadoop.mapreduce.TaskID"],"exceptions":[]},"org.apache.hadoop.mapred.TaskID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String getTaskIDsPattern(java.lang.String, java.lang.Integer, org.apache.hadoop.mapreduce.TaskType, 
java.lang.Integer)":{"name":"getTaskIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","org.apache.hadoop.mapreduce.TaskType","java.lang.Integer"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.JobID":{"name":"org.apache.hadoop.mapreduce.JobID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getJtIdentifier()":{"name":"getJtIdentifier","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.StringBuilder appendTo(java.lang.StringBuilder)":{"name":"appendTo","returnType":"java.lang.StringBuilder","args":["java.lang.StringBuilder"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.OutputLogFilter":{"name":"org.apache.hadoop.mapred.OutputLogFilter","methods":{"boolean 
accept(org.apache.hadoop.fs.Path)":{"name":"accept","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.RegexMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context)":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":[]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapred.join.StreamBackedIterator","methods":{}},"org.apache.hadoop.mapred.lib.FilterOutputFormat":{"name":"org.apache.hadoop.mapred.lib.FilterOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueHistogram","methods":{"java.lang.String getReportDetails()":{"name":"getReportDetails","returnType":"java.lang.String","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.util.TreeMap 
getReportItems()":{"name":"getReportItems","returnType":"java.util.TreeMap","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum","methods":{}},"org.apache.hadoop.mapred.lib.NLineInputFormat":{"name":"org.apache.hadoop.mapred.lib.NLineInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobConf":{"name":"org.apache.hadoop.mapred.JobConf","methods":{"void setInputFormat(java.lang.Class)":{"name":"setInputFormat","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxMapTaskFailuresPercent()":{"name":"getMaxMapTaskFailuresPercent","returnType":"int","args":[],"exceptions":[]},"void setMemoryForMapTask(long)":{"name":"setMemoryForMapTask","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.io.RawComparator getOutputValueGroupingComparator()":{"name":"getOutputValueGroupingComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void 
setOutputKeyClass(java.lang.Class)":{"name":"setOutputKeyClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setProfileParams(java.lang.String)":{"name":"setProfileParams","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxMapAttempts()":{"name":"getMaxMapAttempts","returnType":"int","args":[],"exceptions":[]},"void setNumMapTasks(int)":{"name":"setNumMapTasks","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getJobName()":{"name":"getJobName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getMapOutputCompressorClass(java.lang.Class)":{"name":"getMapOutputCompressorClass","returnType":"java.lang.Class","args":["java.lang.Class"],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaxTaskFailuresPerTracker(int)":{"name":"setMaxTaskFailuresPerTracker","returnType":"void","args":["int"],"exceptions":[]},"void setCombinerClass(java.lang.Class)":{"name":"setCombinerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"java.lang.Class getMapOutputValueClass()":{"name":"getMapOutputValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setKeepTaskFilesPattern(java.lang.String)":{"name":"setKeepTaskFilesPattern","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.Class getMapOutputKeyClass()":{"name":"getMapOutputKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getProfileTaskRange(boolean)":{"name":"getProfileTaskRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["boolean"],"exceptions":[]},"boolean getUseNewReducer()":{"name":"getUseNewReducer","returnType":"boolean","args":[],"exceptions":[]},"void setUseNewMapper(boolean)":{"name":"setUseNewMapper","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String 
getKeyFieldPartitionerOption()":{"name":"getKeyFieldPartitionerOption","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getProfileEnabled()":{"name":"getProfileEnabled","returnType":"boolean","args":[],"exceptions":[]},"void setOutputValueClass(java.lang.Class)":{"name":"setOutputValueClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setReducerClass(java.lang.Class)":{"name":"setReducerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setKeyFieldPartitionerOptions(java.lang.String)":{"name":"setKeyFieldPartitionerOptions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMemoryForReduceTask(long)":{"name":"setMemoryForReduceTask","returnType":"void","args":["long"],"exceptions":[]},"void setProfileEnabled(boolean)":{"name":"setProfileEnabled","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.io.RawComparator getCombinerKeyGroupingComparator()":{"name":"getCombinerKeyGroupingComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void setCredentials(org.apache.hadoop.security.Credentials)":{"name":"setCredentials","returnType":"void","args":["org.apache.hadoop.security.Credentials"],"exceptions":[]},"void setOutputFormat(java.lang.Class)":{"name":"setOutputFormat","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"int getNumTasksToExecutePerJvm()":{"name":"getNumTasksToExecutePerJvm","returnType":"int","args":[],"exceptions":[]},"java.lang.String findContainingJar(java.lang.Class)":{"name":"findContainingJar","returnType":"java.lang.String","args":["java.lang.Class"],"exceptions":[]},"void setMapOutputCompressorClass(java.lang.Class)":{"name":"setMapOutputCompressorClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"boolean getCompressMapOutput()":{"name":"getCompressMapOutput","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.RawComparator 
getOutputKeyComparator()":{"name":"getOutputKeyComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void setJar(java.lang.String)":{"name":"setJar","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean getKeepFailedTaskFiles()":{"name":"getKeepFailedTaskFiles","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption()":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSpeculativeExecution(boolean)":{"name":"setSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Class getPartitionerClass()":{"name":"getPartitionerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.String getMapDebugScript()":{"name":"getMapDebugScript","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getCombinerClass()":{"name":"getCombinerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setMaxReduceAttempts(int)":{"name":"setMaxReduceAttempts","returnType":"void","args":["int"],"exceptions":[]},"void setMapOutputKeyClass(java.lang.Class)":{"name":"setMapOutputKeyClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"int getMaxReduceAttempts()":{"name":"getMaxReduceAttempts","returnType":"int","args":[],"exceptions":[]},"[Ljava.lang.String; getLocalDirs() throws java.io.IOException":{"name":"getLocalDirs","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputFormat getInputFormat()":{"name":"getInputFormat","returnType":"org.apache.hadoop.mapred.InputFormat","args":[],"exceptions":[]},"void setReduceDebugScript(java.lang.String)":{"name":"setReduceDebugScript","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMapperClass(java.lang.Class)":{"name":"setMapperClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void 
setJobName(java.lang.String)":{"name":"setJobName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOutputCommitter(java.lang.Class)":{"name":"setOutputCommitter","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMapRunnerClass(java.lang.Class)":{"name":"setMapRunnerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setReduceSpeculativeExecution(boolean)":{"name":"setReduceSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getJar()":{"name":"getJar","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.security.Credentials getCredentials()":{"name":"getCredentials","returnType":"org.apache.hadoop.security.Credentials","args":[],"exceptions":[]},"void setJobEndNotificationURI(java.lang.String)":{"name":"setJobEndNotificationURI","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJarByClass(java.lang.Class)":{"name":"setJarByClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"long normalizeMemoryConfigValue(long)":{"name":"normalizeMemoryConfigValue","returnType":"long","args":["long"],"exceptions":[]},"void setKeepFailedTaskFiles(boolean)":{"name":"setKeepFailedTaskFiles","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getReduceDebugScript()":{"name":"getReduceDebugScript","returnType":"java.lang.String","args":[],"exceptions":[]},"void setJobPriority(org.apache.hadoop.mapred.JobPriority)":{"name":"setJobPriority","returnType":"void","args":["org.apache.hadoop.mapred.JobPriority"],"exceptions":[]},"java.lang.Class getMapperClass()":{"name":"getMapperClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setUseNewReducer(boolean)":{"name":"setUseNewReducer","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Class getMapRunnerClass()":{"name":"getMapRunnerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int getMaxTaskFailuresPerTracker()":{"name":"getMaxTaskFailuresPerTracker","returnType":"int","args":[],"exceptions":[]},"java.lang.String getProfileParams()":{"name":"getProfileParams","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority getJobPriority()":{"name":"getJobPriority","returnType":"org.apache.hadoop.mapred.JobPriority","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.regex.Pattern getJarUnpackPattern()":{"name":"getJarUnpackPattern","returnType":"java.util.regex.Pattern","args":[],"exceptions":[]},"void setMapSpeculativeExecution(boolean)":{"name":"setMapSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"void setMaxMapAttempts(int)":{"name":"setMaxMapAttempts","returnType":"void","args":["int"],"exceptions":[]},"long getMaxVirtualMemoryForTask()":{"name":"getMaxVirtualMemoryForTask","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.mapred.OutputFormat getOutputFormat()":{"name":"getOutputFormat","returnType":"org.apache.hadoop.mapred.OutputFormat","args":[],"exceptions":[]},"long getMemoryForMapTask()":{"name":"getMemoryForMapTask","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSessionId()":{"name":"getSessionId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getMaxPhysicalMemoryForTask()":{"name":"getMaxPhysicalMemoryForTask","returnType":"long","args":[],"exceptions":[]},"boolean getUseNewMapper()":{"name":"getUseNewMapper","returnType":"boolean","args":[],"exceptions":[]},"void 
setMaxMapTaskFailuresPercent(int)":{"name":"setMaxMapTaskFailuresPercent","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getKeepTaskFilesPattern()":{"name":"getKeepTaskFilesPattern","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPartitionerClass(java.lang.Class)":{"name":"setPartitionerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setNumReduceTasks(int)":{"name":"setNumReduceTasks","returnType":"void","args":["int"],"exceptions":[]},"java.lang.Class getReducerClass()":{"name":"getReducerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int getNumReduceTasks()":{"name":"getNumReduceTasks","returnType":"int","args":[],"exceptions":[]},"void setCombinerKeyGroupingComparator(java.lang.Class)":{"name":"setCombinerKeyGroupingComparator","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMaxReduceTaskFailuresPercent(int)":{"name":"setMaxReduceTaskFailuresPercent","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getJobLocalDir()":{"name":"getJobLocalDir","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String) throws java.io.IOException":{"name":"getLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int getNumMapTasks()":{"name":"getNumMapTasks","returnType":"int","args":[],"exceptions":[]},"void setOutputValueGroupingComparator(java.lang.Class)":{"name":"setOutputValueGroupingComparator","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"boolean 
getReduceSpeculativeExecution()":{"name":"getReduceSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"boolean getMapSpeculativeExecution()":{"name":"getMapSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"void setCompressMapOutput(boolean)":{"name":"setCompressMapOutput","returnType":"void","args":["boolean"],"exceptions":[]},"void setMaxPhysicalMemoryForTask(long)":{"name":"setMaxPhysicalMemoryForTask","returnType":"void","args":["long"],"exceptions":[]},"java.lang.Class getOutputKeyClass()":{"name":"getOutputKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setMapOutputValueClass(java.lang.Class)":{"name":"setMapOutputValueClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void deleteLocalFiles() throws java.io.IOException":{"name":"deleteLocalFiles","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobEndNotificationURI()":{"name":"getJobEndNotificationURI","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProfileTaskRange(boolean, java.lang.String)":{"name":"setProfileTaskRange","returnType":"void","args":["boolean","java.lang.String"],"exceptions":[]},"void setOutputKeyComparatorClass(java.lang.Class)":{"name":"setOutputKeyComparatorClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMapDebugScript(java.lang.String)":{"name":"setMapDebugScript","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setKeyFieldComparatorOptions(java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxReduceTaskFailuresPercent()":{"name":"getMaxReduceTaskFailuresPercent","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.OutputCommitter getOutputCommitter()":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapred.OutputCommitter","args":[],"exceptions":[]},"boolean 
getSpeculativeExecution()":{"name":"getSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"long getMemoryForReduceTask()":{"name":"getMemoryForReduceTask","returnType":"long","args":[],"exceptions":[]},"void deleteLocalFiles(java.lang.String) throws java.io.IOException":{"name":"deleteLocalFiles","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setNumTasksToExecutePerJvm(int)":{"name":"setNumTasksToExecutePerJvm","returnType":"void","args":["int"],"exceptions":[]},"void setSessionId(java.lang.String)":{"name":"setSessionId","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMaxVirtualMemoryForTask(long)":{"name":"setMaxVirtualMemoryForTask","returnType":"void","args":["long"],"exceptions":[]},"java.lang.Class getOutputValueClass()":{"name":"getOutputValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.NLineInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.NLineInputFormat","methods":{"void setNumLinesPerSplit(org.apache.hadoop.mapreduce.Job, int)":{"name":"setNumLinesPerSplit","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","int"],"exceptions":[]},"int getNumLinesPerSplit(org.apache.hadoop.mapreduce.JobContext)":{"name":"getNumLinesPerSplit","returnType":"int","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"java.util.List getSplitsForFile(org.apache.hadoop.fs.FileStatus, org.apache.hadoop.conf.Configuration, int) throws java.io.IOException":{"name":"getSplitsForFile","returnType":"java.util.List","args":["org.apache.hadoop.fs.FileStatus","org.apache.hadoop.conf.Configuration","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TokenCountMapper":{"name":"org.apache.hadoop.mapred.lib.TokenCountMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.MapRunnable":{"name":"org.apache.hadoop.mapred.MapRunnable","methods":{"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobClient":{"name":"org.apache.hadoop.mapred.JobClient","methods":{"[Lorg.apache.hadoop.mapred.TaskReport; getMapTaskReports(java.lang.String) throws 
java.io.IOException":{"name":"getMapTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob getJob(java.lang.String) throws java.io.IOException":{"name":"getJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getStagingAreaDir() throws java.io.IOException":{"name":"getStagingAreaDir","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobQueueInfo getQueueInfo(java.lang.String) throws java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.mapred.JobQueueInfo","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getSetupTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getSetupTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void displayTasks(org.apache.hadoop.mapred.JobID, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"displayTasks","returnType":"void","args":["org.apache.hadoop.mapred.JobID","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob submitJobInternal(org.apache.hadoop.mapred.JobConf) throws java.io.IOException, java.io.FileNotFoundException":{"name":"submitJobInternal","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long renewDelegationToken(org.apache.hadoop.security.token.Token) throws java.lang.InterruptedException, org.apache.hadoop.security.token.SecretManager$InvalidToken, 
java.io.IOException":{"name":"renewDelegationToken","returnType":"long","args":["org.apache.hadoop.security.token.Token"],"exceptions":["java.lang.InterruptedException","org.apache.hadoop.security.token.SecretManager$InvalidToken","java.io.IOException"]},"org.apache.hadoop.mapred.ClusterStatus getClusterStatus() throws java.io.IOException":{"name":"getClusterStatus","returnType":"org.apache.hadoop.mapred.ClusterStatus","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getReduceTaskReports(java.lang.String) throws java.io.IOException":{"name":"getReduceTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getQueues() throws java.io.IOException":{"name":"getQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobStatus; getJobsFromQueue(java.lang.String) throws java.io.IOException":{"name":"getJobsFromQueue","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Cluster getClusterHandle()":{"name":"getClusterHandle","returnType":"org.apache.hadoop.mapreduce.Cluster","args":[],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException, java.io.FileNotFoundException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"[Lorg.apache.hadoop.mapred.QueueAclsInfo; getQueueAclsForCurrentUser() throws java.io.IOException":{"name":"getQueueAclsForCurrentUser","returnType":"[Lorg.apache.hadoop.mapred.QueueAclsInfo;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; 
getReduceTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getReduceTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.security.token.Token getDelegationToken(org.apache.hadoop.io.Text) throws java.lang.InterruptedException, java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.fs.Path getSystemDir()":{"name":"getSystemDir","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean monitorAndPrintJob(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.RunningJob) throws java.lang.InterruptedException, java.io.IOException":{"name":"monitorAndPrintJob","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.RunningJob"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getRootQueues() throws java.io.IOException":{"name":"getRootQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob submitJob(java.lang.String) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, java.io.FileNotFoundException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["java.lang.String"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","java.io.FileNotFoundException"]},"[Lorg.apache.hadoop.mapred.JobStatus; getAllJobs() throws 
java.io.IOException":{"name":"getAllJobs","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob getJob(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void init(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"init","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setTaskOutputFilter(org.apache.hadoop.mapred.JobClient$TaskStatusFilter)":{"name":"setTaskOutputFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobClient$TaskStatusFilter"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.JobStatus; jobsToComplete() throws java.io.IOException":{"name":"jobsToComplete","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":[],"exceptions":["java.io.IOException"]},"boolean isJobDirValid(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FileSystem) throws java.io.IOException":{"name":"isJobDirValid","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FileSystem"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobClient$TaskStatusFilter getTaskOutputFilter()":{"name":"getTaskOutputFilter","returnType":"org.apache.hadoop.mapred.JobClient$TaskStatusFilter","args":[],"exceptions":[]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getChildQueues(java.lang.String) throws java.io.IOException":{"name":"getChildQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.ClusterStatus getClusterStatus(boolean) throws java.io.IOException":{"name":"getClusterStatus","returnType":"org.apache.hadoop.mapred.ClusterStatus","args":["boolean"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getFs() 
throws java.io.IOException":{"name":"getFs","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobClient$TaskStatusFilter getTaskOutputFilter(org.apache.hadoop.mapred.JobConf)":{"name":"getTaskOutputFilter","returnType":"org.apache.hadoop.mapred.JobClient$TaskStatusFilter","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.TaskReport; getCleanupTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getCleanupTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void setTaskOutputFilter(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.JobClient$TaskStatusFilter)":{"name":"setTaskOutputFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.JobClient$TaskStatusFilter"],"exceptions":[]},"int getDefaultReduces() throws java.io.IOException":{"name":"getDefaultReduces","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"int getDefaultMaps() throws java.io.IOException":{"name":"getDefaultMaps","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getMapTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getMapTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void cancelDelegationToken(org.apache.hadoop.security.token.Token) throws 
java.lang.InterruptedException, org.apache.hadoop.security.token.SecretManager$InvalidToken, java.io.IOException":{"name":"cancelDelegationToken","returnType":"void","args":["org.apache.hadoop.security.token.Token"],"exceptions":["java.lang.InterruptedException","org.apache.hadoop.security.token.SecretManager$InvalidToken","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.RecordWriter":{"name":"org.apache.hadoop.mapreduce.RecordWriter","methods":{"void close(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"close","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.FieldSelectionMapReduce":{"name":"org.apache.hadoop.mapred.lib.FieldSelectionMapReduce","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapred.KeyValueLineRecordReader","methods":{"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws 
java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount","methods":{"java.util.Set getUniqueItems()":{"name":"getUniqueItems","returnType":"java.util.Set","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long setMaxItems(long)":{"name":"setMaxItems","returnType":"long","args":["long"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper","methods":{"java.lang.String specToString(java.lang.String, java.lang.String, int, java.util.List, java.util.List)":{"name":"specToString","returnType":"java.lang.String","args":["java.lang.String","java.lang.String","int","java.util.List","java.util.List"],"exceptions":[]},"org.apache.hadoop.io.Text getKey()":{"name":"getKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getValue()":{"name":"getValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"int parseOutputKeyValueSpec(java.lang.String, java.util.List, java.util.List)":{"name":"parseOutputKeyValueSpec","returnType":"int","args":["java.lang.String","java.util.List","java.util.List"],"exceptions":[]},"void extractOutputKeyValue(java.lang.String, 
java.lang.String, java.lang.String, java.util.List, java.util.List, int, boolean, boolean)":{"name":"extractOutputKeyValue","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String","java.util.List","java.util.List","int","boolean","boolean"],"exceptions":[]}}},"org.apache.hadoop.mapred.MapFileOutputFormat":{"name":"org.apache.hadoop.mapred.MapFileOutputFormat","methods":{"[Lorg.apache.hadoop.io.MapFile$Reader; getReaders(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.MapFile$Reader;","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable getEntry([Lorg.apache.hadoop.io.MapFile$Reader;, org.apache.hadoop.mapred.Partitioner, org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"getEntry","returnType":"org.apache.hadoop.io.Writable","args":["[Lorg.apache.hadoop.io.MapFile$Reader;","org.apache.hadoop.mapred.Partitioner","org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.Utils":{"name":"org.apache.hadoop.mapred.Utils","methods":{}},"org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter":{"name":"org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter","methods":{"org.apache.hadoop.fs.Path 
getWorkPath() throws java.io.IOException":{"name":"getWorkPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getJobAttemptPath(org.apache.hadoop.mapreduce.JobContext)":{"name":"getJobAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.fs.Path getCommittedTaskPath(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path)":{"name":"getCommittedTaskPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getJobAttemptPath(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.fs.Path)":{"name":"getJobAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path)":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void 
recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.fs.Path getCommittedTaskPath(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getCommittedTaskPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMax":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMax","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void 
addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileSplit":{"name":"org.apache.hadoop.mapred.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptions":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent","methods":{"void setTaskID(org.apache.hadoop.mapred.TaskAttemptID)":{"name":"setTaskID","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":[]},"void setTaskRunTime(int)":{"name":"setTaskRunTime","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskAttemptID getTaskAttemptId()":{"name":"getTaskAttemptId","returnType":"org.apache.hadoop.mapreduce.TaskAttemptID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID getTaskAttemptId()":{"name":"getTaskAttemptId","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"void setTaskId(java.lang.String)":{"name":"setTaskId","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setTaskTrackerHttp(java.lang.String)":{"name":"setTaskTrackerHttp","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setEventId(int)":{"name":"setEventId","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent downgrade(org.apache.hadoop.mapreduce.TaskCompletionEvent)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent","args":["org.apache.hadoop.mapreduce.TaskCompletionEvent"],"exceptions":[]},"java.lang.String getTaskId()":{"name":"getTaskId","returnType":"java.lang.String","args":[],"exceptions":[]},"void setTaskStatus(org.apache.hadoop.mapred.TaskCompletionEvent$Status)":{"name":"setTaskStatus","returnType":"void","args":["org.apache.hadoop.mapred.TaskCompletionEvent$Status"],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status 
getTaskStatus()":{"name":"getTaskStatus","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.IdentityMapper":{"name":"org.apache.hadoop.mapred.lib.IdentityMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Counters":{"name":"org.apache.hadoop.mapreduce.Counters","methods":{}},"org.apache.hadoop.mapred.join.WrappedRecordReader":{"name":"org.apache.hadoop.mapred.join.WrappedRecordReader","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"void 
accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.mapred.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void 
setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskAttemptID":{"name":"org.apache.hadoop.mapreduce.TaskAttemptID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapreduce.TaskID","args":[],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskAttemptID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskAttemptID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"int 
compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LazyOutputFormat":{"name":"org.apache.hadoop.mapred.lib.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, [Ljava.lang.String;) throws 
java.io.IOException":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, int) throws java.io.IOException":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.String constructQuery(java.lang.String, [Ljava.lang.String;)":{"name":"constructQuery","returnType":"java.lang.String","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.InvalidJobConfException":{"name":"org.apache.hadoop.mapred.InvalidJobConfException","methods":{}},"org.apache.hadoop.mapred.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapred.SequenceFileRecordReader","methods":{"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"long getPos() 
throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.Class getValueClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.InnerJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.InnerJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.lib.MultipleOutputs":{"name":"org.apache.hadoop.mapred.lib.MultipleOutputs","methods":{"boolean getCountersEnabled(org.apache.hadoop.mapred.JobConf)":{"name":"getCountersEnabled","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.OutputCollector getCollector(java.lang.String, java.lang.String, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getCollector","returnType":"org.apache.hadoop.mapred.OutputCollector","args":["java.lang.String","java.lang.String","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.Class getNamedOutputKeyClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setCountersEnabled(org.apache.hadoop.mapred.JobConf, 
boolean)":{"name":"setCountersEnabled","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"boolean isMultiNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"isMultiNamedOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"java.lang.Class getNamedOutputFormatClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputFormatClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"java.util.Iterator getNamedOutputs()":{"name":"getNamedOutputs","returnType":"java.util.Iterator","args":[],"exceptions":[]},"java.util.List getNamedOutputsList(org.apache.hadoop.mapred.JobConf)":{"name":"getNamedOutputsList","returnType":"java.util.List","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.OutputCollector getCollector(java.lang.String, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getCollector","returnType":"org.apache.hadoop.mapred.OutputCollector","args":["java.lang.String","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMultiNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addMultiNamedOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void addNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addNamedOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.Class 
getNamedOutputValueClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.conf.Configuration)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setPartitionFile(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path"],"exceptions":[]},"int getPartition(org.apache.hadoop.io.WritableComparable, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["org.apache.hadoop.io.WritableComparable","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.MultiFileInputFormat":{"name":"org.apache.hadoop.mapred.MultiFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat","methods":{"org.apache.hadoop.io.SequenceFile$CompressionType getOutputCompressionType(org.apache.hadoop.mapreduce.JobContext)":{"name":"getOutputCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setOutputCompressionType(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setOutputCompressionType","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters":{"name":"org.apache.hadoop.mapred.Counters","methods":{"void log(org.apache.commons.logging.Log)":{"name":"log","returnType":"void","args":["org.apache.commons.logging.Log"],"exceptions":[]},"void 
incrAllCounters(org.apache.hadoop.mapred.Counters)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapred.Counters"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Group getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapred.Counters$Group","args":["java.lang.String"],"exceptions":[]},"void incrCounter(java.lang.String, java.lang.String, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"void incrCounter(java.lang.Enum, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.Enum","long"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters sum(org.apache.hadoop.mapred.Counters, org.apache.hadoop.mapred.Counters)":{"name":"sum","returnType":"org.apache.hadoop.mapred.Counters","args":["org.apache.hadoop.mapred.Counters","org.apache.hadoop.mapred.Counters"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, int, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase 
getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String"],"exceptions":[]},"java.lang.String makeCompactString()":{"name":"makeCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters fromEscapedCompactString(java.lang.String) throws java.text.ParseException":{"name":"fromEscapedCompactString","returnType":"org.apache.hadoop.mapred.Counters","args":["java.lang.String"],"exceptions":["java.text.ParseException"]},"long getCounter(java.lang.Enum)":{"name":"getCounter","returnType":"long","args":["java.lang.Enum"],"exceptions":[]},"java.util.Collection getGroupNames()":{"name":"getGroupNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.lang.Iterable getGroupNames()":{"name":"getGroupNames","returnType":"java.lang.Iterable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.ResetableIterator":{"name":"org.apache.hadoop.mapred.join.ResetableIterator","methods":{}},"org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapred.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.ArrayListBackedIterator":{"name":"org.apache.hadoop.mapred.join.ArrayListBackedIterator","methods":{}},"org.apache.hadoop.mapred.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapred.lib.db.DBConfiguration","methods":{"void configureDB(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapred.lib.KeyFieldBasedComparator","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReaderWrapper","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskReport":{"name":"org.apache.hadoop.mapred.TaskReport","methods":{"org.apache.hadoop.mapred.TaskAttemptID getSuccessfulTaskAttempt()":{"name":"getSuccessfulTaskAttempt","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"java.util.Collection 
getRunningTaskAttempts()":{"name":"getRunningTaskAttempts","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters getCounters()":{"name":"getCounters","returnType":"org.apache.hadoop.mapred.Counters","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapred.TaskID","args":[],"exceptions":[]},"void setSuccessfulAttempt(org.apache.hadoop.mapred.TaskAttemptID)":{"name":"setSuccessfulAttempt","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":[]},"void setRunningTaskAttempts(java.util.Collection)":{"name":"setRunningTaskAttempts","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"java.lang.String getTaskId()":{"name":"getTaskId","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.TextOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.TextOutputFormat","methods":{"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase","methods":{"void setup(org.apache.hadoop.conf.Configuration)":{"name":"setup","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.NullOutputFormat":{"name":"org.apache.hadoop.mapred.lib.NullOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf)":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.InvalidFileTypeException":{"name":"org.apache.hadoop.mapred.InvalidFileTypeException","methods":{}},"org.apache.hadoop.mapreduce.tools.CLI":{"name":"org.apache.hadoop.mapreduce.tools.CLI","methods":{"void displayJobList([Lorg.apache.hadoop.mapreduce.JobStatus;, java.io.PrintWriter)":{"name":"displayJobList","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;","java.io.PrintWriter"],"exceptions":[]},"void displayJobList([Lorg.apache.hadoop.mapreduce.JobStatus;) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"displayJobList","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.mapred.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setInput(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setInput(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.String","java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws 
java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.Reporter":{"name":"org.apache.hadoop.mapred.Reporter","methods":{"void incrCounter(java.lang.Enum, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.Enum","long"],"exceptions":[]},"void incrCounter(java.lang.String, java.lang.String, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounter(java.lang.String, java.lang.String)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter 
getCounter(java.lang.Enum)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.Enum"],"exceptions":[]},"org.apache.hadoop.mapred.InputSplit getInputSplit() throws java.lang.UnsupportedOperationException":{"name":"getInputSplit","returnType":"org.apache.hadoop.mapred.InputSplit","args":[],"exceptions":["java.lang.UnsupportedOperationException"]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setStatus(java.lang.String)":{"name":"setStatus","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputFormat":{"name":"org.apache.hadoop.mapreduce.InputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.InputSampler":{"name":"org.apache.hadoop.mapred.lib.InputSampler","methods":{"void writePartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.lib.InputSampler$Sampler) throws java.lang.InterruptedException, java.io.IOException, 
java.lang.ClassNotFoundException":{"name":"writePartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.lib.InputSampler$Sampler"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMax","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ArrayListBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ArrayListBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void 
clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobPriority":{"name":"org.apache.hadoop.mapred.JobPriority","methods":{"[Lorg.apache.hadoop.mapred.JobPriority; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.JobPriority;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.JobPriority","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FileOutputFormat","methods":{"boolean getCompressOutput(org.apache.hadoop.mapreduce.JobContext)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapreduce.TaskInputOutputContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskInputOutputContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setOutputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path"],"exceptions":[]},"void setOutputCompressorClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.fs.Path 
getPathForWorkFile(org.apache.hadoop.mapreduce.TaskInputOutputContext, java.lang.String, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"getPathForWorkFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskInputOutputContext","java.lang.String","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.String getUniqueFile(org.apache.hadoop.mapreduce.TaskAttemptContext, java.lang.String, java.lang.String)":{"name":"getUniqueFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapreduce.JobContext)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapreduce.JobContext, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.fs.Path getDefaultWorkFile(org.apache.hadoop.mapreduce.TaskAttemptContext, java.lang.String) throws 
java.io.IOException":{"name":"getDefaultWorkFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","java.lang.String"],"exceptions":["java.io.IOException"]},"void setCompressOutput(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorReducer":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapred.join.CompositeRecordReader","methods":{"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.mapred.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":[]},"void add(org.apache.hadoop.mapred.join.ComposableRecordReader) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainReducer":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainReducer","methods":{"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setReducer(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"setReducer","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void run(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.ID":{"name":"org.apache.hadoop.mapreduce.ID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsTextRecordReader":{"name":"org.apache.hadoop.mapred.SequenceFileAsTextRecordReader","methods":{"boolean next(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text"],"exceptions":["java.io.IOException"]},"long 
getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.partition.HashPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.ChainReducer":{"name":"org.apache.hadoop.mapred.lib.ChainReducer","methods":{"void setReducer(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.mapred.JobConf)":{"name":"setReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void reduce(java.lang.Object, java.util.Iterator, 
org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMapper(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.mapred.JobConf)":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleInputs":{"name":"org.apache.hadoop.mapred.lib.MultipleInputs","methods":{"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path","java.lang.Class"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.mapred.SequenceFileOutputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileOutputFormat","methods":{"[Lorg.apache.hadoop.io.SequenceFile$Reader; getReaders(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.SequenceFile$Reader;","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setOutputCompressionType(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setOutputCompressionType","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$CompressionType getOutputCompressionType(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setMapperClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setMapperClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"int getNumberOfThreads(org.apache.hadoop.mapreduce.JobContext)":{"name":"getNumberOfThreads","returnType":"int","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"java.lang.Class getMapperClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMapperClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setNumberOfThreads(org.apache.hadoop.mapreduce.Job, int)":{"name":"setNumberOfThreads","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.Mapper":{"name":"org.apache.hadoop.mapred.Mapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskType":{"name":"org.apache.hadoop.mapreduce.TaskType","methods":{"[Lorg.apache.hadoop.mapreduce.TaskType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapreduce.TaskType;","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.TextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.TextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader 
createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.OutputCommitter":{"name":"org.apache.hadoop.mapred.OutputCommitter","methods":{"void cleanupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void 
abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapred.JobContext, int) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext","int"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws 
java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.InputFormat":{"name":"org.apache.hadoop.mapred.InputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SkipBadRecords":{"name":"org.apache.hadoop.mapred.SkipBadRecords","methods":{"void setMapperMaxSkipRecords(org.apache.hadoop.conf.Configuration, long)":{"name":"setMapperMaxSkipRecords","returnType":"void","args":["org.apache.hadoop.conf.Configuration","long"],"exceptions":[]},"int getAttemptsToStartSkipping(org.apache.hadoop.conf.Configuration)":{"name":"getAttemptsToStartSkipping","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"long getReducerMaxSkipGroups(org.apache.hadoop.conf.Configuration)":{"name":"getReducerMaxSkipGroups","returnType":"long","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"long getMapperMaxSkipRecords(org.apache.hadoop.conf.Configuration)":{"name":"getMapperMaxSkipRecords","returnType":"long","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setSkipOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setSkipOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"void setReducerMaxSkipGroups(org.apache.hadoop.conf.Configuration, long)":{"name":"setReducerMaxSkipGroups","returnType":"void","args":["org.apache.hadoop.conf.Configuration","long"],"exceptions":[]},"void setAutoIncrMapperProcCount(org.apache.hadoop.conf.Configuration, 
boolean)":{"name":"setAutoIncrMapperProcCount","returnType":"void","args":["org.apache.hadoop.conf.Configuration","boolean"],"exceptions":[]},"boolean getAutoIncrReducerProcCount(org.apache.hadoop.conf.Configuration)":{"name":"getAutoIncrReducerProcCount","returnType":"boolean","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"boolean getAutoIncrMapperProcCount(org.apache.hadoop.conf.Configuration)":{"name":"getAutoIncrMapperProcCount","returnType":"boolean","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.Path getSkipOutputPath(org.apache.hadoop.conf.Configuration)":{"name":"getSkipOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setAutoIncrReducerProcCount(org.apache.hadoop.conf.Configuration, boolean)":{"name":"setAutoIncrReducerProcCount","returnType":"void","args":["org.apache.hadoop.conf.Configuration","boolean"],"exceptions":[]},"void setAttemptsToStartSkipping(org.apache.hadoop.conf.Configuration, int)":{"name":"setAttemptsToStartSkipping","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.TaskAttemptID":{"name":"org.apache.hadoop.mapred.TaskAttemptID","methods":{"org.apache.hadoop.mapred.TaskAttemptID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String getTaskAttemptIDsPattern(java.lang.String, java.lang.Integer, org.apache.hadoop.mapreduce.TaskType, java.lang.Integer, java.lang.Integer)":{"name":"getTaskAttemptIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","org.apache.hadoop.mapreduce.TaskType","java.lang.Integer","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID forName(java.lang.String) throws 
java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapreduce.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapreduce.TaskID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapred.TaskID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["org.apache.hadoop.mapreduce.TaskAttemptID"],"exceptions":[]},"java.lang.String getTaskAttemptIDsPattern(java.lang.String, java.lang.Integer, java.lang.Boolean, java.lang.Integer, java.lang.Integer)":{"name":"getTaskAttemptIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","java.lang.Boolean","java.lang.Integer","java.lang.Integer"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorCombiner":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorCombiner","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.OutputFormat":{"name":"org.apache.hadoop.mapred.OutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RunningJob":{"name":"org.apache.hadoop.mapred.RunningJob","methods":{"void setJobPriority(java.lang.String) throws java.io.IOException":{"name":"setJobPriority","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean isComplete() throws java.io.IOException":{"name":"isComplete","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void killTask(java.lang.String, boolean) throws java.io.IOException":{"name":"killTask","returnType":"void","args":["java.lang.String","boolean"],"exceptions":["java.io.IOException"]},"float cleanupProgress() throws java.io.IOException":{"name":"cleanupProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"int getJobState() throws java.io.IOException":{"name":"getJobState","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"boolean isSuccessful() throws 
java.io.IOException":{"name":"isSuccessful","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"float mapProgress() throws java.io.IOException":{"name":"mapProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"float setupProgress() throws java.io.IOException":{"name":"setupProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getFailureInfo() throws java.io.IOException":{"name":"getFailureInfo","returnType":"java.lang.String","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobName()":{"name":"getJobName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getID()":{"name":"getID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobStatus getJobStatus() throws java.io.IOException":{"name":"getJobStatus","returnType":"org.apache.hadoop.mapred.JobStatus","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobID()":{"name":"getJobID","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getJobFile()":{"name":"getJobFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void waitForCompletion() throws java.io.IOException":{"name":"waitForCompletion","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskCompletionEvent; getTaskCompletionEvents(int) throws java.io.IOException":{"name":"getTaskCompletionEvents","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent;","args":["int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.Counters getCounters() throws java.io.IOException":{"name":"getCounters","returnType":"org.apache.hadoop.mapred.Counters","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getHistoryUrl() throws 
java.io.IOException":{"name":"getHistoryUrl","returnType":"java.lang.String","args":[],"exceptions":["java.io.IOException"]},"float reduceProgress() throws java.io.IOException":{"name":"reduceProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"void killTask(org.apache.hadoop.mapred.TaskAttemptID, boolean) throws java.io.IOException":{"name":"killTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID","boolean"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConfiguration()":{"name":"getConfiguration","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"[Ljava.lang.String; getTaskDiagnostics(org.apache.hadoop.mapred.TaskAttemptID) throws java.io.IOException":{"name":"getTaskDiagnostics","returnType":"[Ljava.lang.String;","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":["java.io.IOException"]},"boolean isRetired() throws java.io.IOException":{"name":"isRetired","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void killJob() throws java.io.IOException":{"name":"killJob","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getTrackingURL()":{"name":"getTrackingURL","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.FileInputFormat":{"name":"org.apache.hadoop.mapred.FileInputFormat","methods":{"void setInputPaths(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setInputPathFilter(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setInputPathFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void addInputPaths(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"addInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setInputPaths(org.apache.hadoop.mapred.JobConf, [Lorg.apache.hadoop.fs.Path;)":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.fs.PathFilter getInputPathFilter(org.apache.hadoop.mapred.JobConf)":{"name":"getInputPathFilter","returnType":"org.apache.hadoop.fs.PathFilter","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getInputPaths(org.apache.hadoop.mapred.JobConf)":{"name":"getInputPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextRecordReader","methods":{"org.apache.hadoop.io.Text 
getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat","methods":{"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable getEntry([Lorg.apache.hadoop.io.MapFile$Reader;, org.apache.hadoop.mapreduce.Partitioner, org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"getEntry","returnType":"org.apache.hadoop.io.Writable","args":["[Lorg.apache.hadoop.io.MapFile$Reader;","org.apache.hadoop.mapreduce.Partitioner","org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.MapFile$Reader; getReaders(org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.MapFile$Reader;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMax","methods":{}},"org.apache.hadoop.mapred.lib.CombineFileRecordReader":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object 
createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Mapper":{"name":"org.apache.hadoop.mapreduce.Mapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configuration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.InvalidInputException":{"name":"org.apache.hadoop.mapred.InvalidInputException","methods":{"java.util.List 
getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.MultipleInputs":{"name":"org.apache.hadoop.mapreduce.lib.input.MultipleInputs","methods":{"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path","java.lang.Class"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.mapred.OutputCollector":{"name":"org.apache.hadoop.mapred.OutputCollector","methods":{"void collect(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"collect","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.MultipleOutputs":{"name":"org.apache.hadoop.mapreduce.lib.output.MultipleOutputs","methods":{"void write(java.lang.Object, java.lang.Object, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setCountersEnabled(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setCountersEnabled","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]},"boolean getCountersEnabled(org.apache.hadoop.mapreduce.JobContext)":{"name":"getCountersEnabled","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void close() 
throws java.lang.InterruptedException, java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.lang.String, java.lang.Object, java.lang.Object, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.String","java.lang.Object","java.lang.Object","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addNamedOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addNamedOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void write(java.lang.String, java.lang.Object, java.lang.Object) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.String","java.lang.Object","java.lang.Object"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapred.join.OverrideRecordReader","methods":{}},"org.apache.hadoop.mapred.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapred.join.ComposableRecordReader","methods":{"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws 
java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TextInputFormat":{"name":"org.apache.hadoop.mapred.TextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.InverseMapper":{"name":"org.apache.hadoop.mapred.lib.InverseMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionMapper":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.counters.AbstractCounters":{"name":"org.apache.hadoop.mapreduce.counters.AbstractCounters","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.Enum)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.Enum"],"exceptions":[]},"int countCounters()":{"name":"countCounters","returnType":"int","args":[],"exceptions":[]},"boolean getWriteAllCounters()":{"name":"getWriteAllCounters","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase addGroup(org.apache.hadoop.mapreduce.counters.CounterGroupBase)":{"name":"addGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["org.apache.hadoop.mapreduce.counters.CounterGroupBase"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, org.apache.hadoop.mapreduce.FileSystemCounter)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","org.apache.hadoop.mapreduce.FileSystemCounter"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase addGroup(java.lang.String, java.lang.String)":{"name":"addGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.Limits limits()":{"name":"limits","returnType":"org.apache.hadoop.mapreduce.counters.Limits","args":[],"exceptions":[]},"void setWriteAllCounters(boolean)":{"name":"setWriteAllCounters","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Iterable getGroupNames()":{"name":"getGroupNames","returnType":"java.lang.Iterable","args":[],"exceptions":[]},"void incrAllCounters(org.apache.hadoop.mapreduce.counters.AbstractCounters)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapreduce.counters.AbstractCounters"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, 
org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.Counters$Group":{"name":"org.apache.hadoop.mapred.Counters$Group","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounter(int, java.lang.String)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["int","java.lang.String"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, boolean)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","boolean"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.Counters$Counter addCounter(java.lang.String, java.lang.String, 
long)":{"name":"addCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounterForName(java.lang.String)":{"name":"getCounterForName","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase getUnderlyingGroup()":{"name":"getUnderlyingGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"void incrAllCounters(org.apache.hadoop.mapreduce.counters.CounterGroupBase)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapreduce.counters.CounterGroupBase"],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter addCounter(java.lang.String, java.lang.String, long)":{"name":"addCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"long getCounter(java.lang.String)":{"name":"getCounter","returnType":"long","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, boolean)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String"],"exceptions":[]},"void 
addCounter(org.apache.hadoop.mapred.Counters$Counter)":{"name":"addCounter","returnType":"void","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"void addCounter(org.apache.hadoop.mapreduce.Counter)":{"name":"addCounter","returnType":"void","args":["org.apache.hadoop.mapreduce.Counter"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.map.InverseMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.InverseMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.db.DBWritable":{"name":"org.apache.hadoop.mapreduce.lib.db.DBWritable","methods":{"void readFields(java.sql.ResultSet) throws java.sql.SQLException":{"name":"readFields","returnType":"void","args":["java.sql.ResultSet"],"exceptions":["java.sql.SQLException"]},"void write(java.sql.PreparedStatement) throws java.sql.SQLException":{"name":"write","returnType":"void","args":["java.sql.PreparedStatement"],"exceptions":["java.sql.SQLException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader","methods":{"java.lang.Object 
getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.FileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.FileInputFormat","methods":{"void setInputPaths(org.apache.hadoop.mapreduce.Job, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void setInputPathFilter(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setInputPathFilter","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"long 
getMinSplitSize(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMinSplitSize","returnType":"long","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.Path; getInputPaths(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"boolean getInputDirRecursive(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputDirRecursive","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setInputDirRecursive(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setInputDirRecursive","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]},"long getMaxSplitSize(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMaxSplitSize","returnType":"long","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.fs.PathFilter getInputPathFilter(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputPathFilter","returnType":"org.apache.hadoop.fs.PathFilter","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setMinInputSplitSize(org.apache.hadoop.mapreduce.Job, long)":{"name":"setMinInputSplitSize","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","long"],"exceptions":[]},"void setMaxInputSplitSize(org.apache.hadoop.mapreduce.Job, long)":{"name":"setMaxInputSplitSize","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","long"],"exceptions":[]},"void setInputPaths(org.apache.hadoop.mapreduce.Job, java.lang.String) throws 
java.io.IOException":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":["java.io.IOException"]},"void addInputPaths(org.apache.hadoop.mapreduce.Job, java.lang.String) throws java.io.IOException":{"name":"addInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.MultiFilterRecordReader":{"name":"org.apache.hadoop.mapred.join.MultiFilterRecordReader","methods":{"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat","methods":{"java.lang.String compose(java.lang.String, java.lang.Class, 
[Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"void setFormat(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.FileOutputCommitter":{"name":"org.apache.hadoop.mapred.FileOutputCommitter","methods":{"void cleanupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapred.TaskAttemptContext) throws 
java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkPath(org.apache.hadoop.mapred.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getWorkPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapred.JobContext, int) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext","int"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapred.TaskAttemptContext) throws 
java.io.IOException":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMin","methods":{}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setKeyFieldPartitionerOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldPartitionerOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"java.lang.String 
getKeyFieldPartitionerOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldPartitionerOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapred.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.conf.Configuration, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.InnerJoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.InnerJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.MapRunner":{"name":"org.apache.hadoop.mapred.MapRunner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.RecordReader":{"name":"org.apache.hadoop.mapreduce.RecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.ChainMapper":{"name":"org.apache.hadoop.mapred.lib.ChainMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMapper(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, 
org.apache.hadoop.mapred.JobConf)":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobConfigurable":{"name":"org.apache.hadoop.mapred.JobConfigurable","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner":{"name":"org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.MultithreadedMapRunner":{"name":"org.apache.hadoop.mapred.lib.MultithreadedMapRunner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}}}} \ No newline at end of file diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list new file mode 100644 index 00000000..03167870 --- 
/dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list @@ -0,0 +1,22 @@ +netty-3\.6\.2\.Final[\.\-_].*jar +leveldbjni-all-1\.8[\.\-_].*jar +paranamer-2\.3[\.\-_].*jar +jackson-core-asl-1\.9\.13[\.\-_].*jar +jersey-server-1\.9[\.\-_].*jar +guice-3\.0[\.\-_].*jar +avro-1\.7\.[4-7][\.\-_].*jar +log4j-1\.2\.17[\.\-_].*jar +jackson-mapper-asl-1\.9\.13[\.\-_].*jar +snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +jersey-core-1\.9[\.\-_].*jar +jersey-guice-1\.9[\.\-_].*jar +commons-compress-1\.4\.1[\.\-_].*jar +junit-4\.11[\.\-_].*jar +xz-1\.0[\.\-_].*jar +asm-3\.2[\.\-_].*jar +aopalliance-1\.0[\.\-_].*jar +javax\.inject-1[\.\-_].*jar +protobuf-java-2\.5\.0[\.\-_].*jar +commons-io-2\.4[\.\-_].*jar +hamcrest-core-1\.3[\.\-_].*jar +guice-servlet-3\.0[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list new file mode 100644 index 00000000..cc06d808 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list @@ -0,0 +1,123 @@ +hadoop-mapreduce-client-core[\.\-_].*jar +bin +bin/mapred +sbin +sbin/mr-jobhistory-daemon\.sh +hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar +commons-digester-1\.8[\.\-_].*jar +curator-client-2\.7\.1[\.\-_].*jar +commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar +jsp-api-2\.1[\.\-_].*jar +jets3t-0\.9\.0[\.\-_].*jar +hadoop-sls-2\.7\.[0-9][\.\-_].*jar +jackson-core-2\.2\.3[\.\-_].*jar +hadoop-mapreduce-client-hs-2\.7\.[0-9][\.\-_].*jar +hadoop-mapreduce-client-jobclient-2\.7\.[0-9].*-tests\.jar +hadoop-distcp[\.\-_].*jar +jaxb-api-2\.2\.2[\.\-_].*jar +api-util-1\.0\.0-M20[\.\-_].*jar +jettison-1\.1[\.\-_].*jar +commons-lang3-3\.3\.2[\.\-_].*jar +curator-framework-2\.7\.1[\.\-_].*jar +commons-io-2\.4[\.\-_].*jar +hadoop-mapreduce-client-hs-plugins[\.\-_].*jar +metrics-core-3\.0\.1[\.\-_].*jar +hadoop-mapreduce-client-app[\.\-_].*jar 
+jetty-util-6\.1\.26[\.\-_].*jar +avro-1\.7\.[4-7][\.\-_].*jar +jaxb-impl-2\.2\.3-1[\.\-_].*jar +hadoop-mapreduce-client-hs[\.\-_].*jar +hadoop-mapreduce-client-hs-plugins-2\.7\.[0-9][\.\-_].*jar +hadoop-sls[\.\-_].*jar +hadoop-ant[\.\-_].*jar +netty-3\.6\.2\.Final[\.\-_].*jar +httpcore-4\.[0-9]\.[0-9][\.\-_].*jar +jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar +hadoop-mapreduce-client-jobclient[\.\-_].*jar +hadoop-archives[\.\-_].*jar +jersey-core-1\.9[\.\-_].*jar +jackson-mapper-asl-1\.9\.13[\.\-_].*jar +hadoop-mapreduce-examples-2\.7\.[0-9][\.\-_].*jar +hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar +snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +gson-2\.2\.4[\.\-_].*jar +hadoop-gridmix[\.\-_].*jar +commons-net-3\.1[\.\-_].*jar +asm-3\.2[\.\-_].*jar +commons-compress-1\.4\.1[\.\-_].*jar +mockito-all-1\.8\.5[\.\-_].*jar +hadoop-openstack[\.\-_].*jar +jackson-xc-1\.9\.13[\.\-_].*jar +junit-4\.11[\.\-_].*jar +jersey-json-1\.9[\.\-_].*jar +hadoop-distcp-2\.7\.[0-9][\.\-_].*jar +xmlenc-0\.52[\.\-_].*jar +api-asn1-api-1\.0\.0-M20[\.\-_].*jar +commons-codec-1\.4[\.\-_].*jar +jackson-core-asl-1\.9\.13[\.\-_].*jar +servlet-api-2\.5[\.\-_].*jar +paranamer-2\.3[\.\-_].*jar +hadoop-datajoin-2\.7\.[0-9][\.\-_].*jar +jetty-6\.1\.26[\.\-_].*jar +jersey-server-1\.9[\.\-_].*jar +hadoop-extras-2\.7\.[0-9][\.\-_].*jar +hadoop-mapreduce-client-shuffle[\.\-_].*jar +apacheds-i18n-2\.0\.0-M15[\.\-_].*jar +hadoop-auth-2\.7\.[0-9][\.\-_].*jar +hadoop-streaming-2\.7\.[0-9][\.\-_].*jar +hadoop-gridmix-2\.7\.[0-9][\.\-_].*jar +commons-math3-3\.1\.1[\.\-_].*jar +hadoop-auth[\.\-_].*jar +log4j-1\.2\.17[\.\-_].*jar +hamcrest-core-1\.3[\.\-_].*jar +hadoop-mapreduce-examples[\.\-_].*jar +hadoop-extras[\.\-_].*jar +stax-api-1\.0-2[\.\-_].*jar +hadoop-mapreduce-client-common[\.\-_].*jar +xz-1\.0[\.\-_].*jar +zookeeper-3\.4\.6[\.\-_].*jar +hadoop-archives-2\.7\.[0-9][\.\-_].*jar +activation-1\.1[\.\-_].*jar +hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar 
+htrace-core-3\.1\.0-incubating[\.\-_].*jar +protobuf-java-2\.5\.0[\.\-_].*jar +hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar +hadoop-datajoin[\.\-_].*jar +apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar +java-xmlbuilder-0\.4[\.\-_].*jar +httpclient-4\.[0-9]\.[0-9][\.\-_].*jar +hadoop-rumen-2\.7\.[0-9][\.\-_].*jar +hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar +guava-11\.0\.2[\.\-_].*jar +jsr305-3\.0\.0[\.\-_].*jar +hadoop-streaming[\.\-_].*jar +hadoop-rumen[\.\-_].*jar +jackson-jaxrs-1\.9\.13[\.\-_].*jar +lib +lib/leveldbjni-all-1\.8[\.\-_].*jar +lib/commons-io-2\.4[\.\-_].*jar +lib/avro-1\.7\.[4-7][\.\-_].*jar +lib/jersey-guice-1\.9[\.\-_].*jar +lib/netty-3\.6\.2\.Final[\.\-_].*jar +lib/jersey-core-1\.9[\.\-_].*jar +lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar +lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar +lib/asm-3\.2[\.\-_].*jar +lib/commons-compress-1\.4\.1[\.\-_].*jar +lib/aopalliance-1\.0[\.\-_].*jar +lib/junit-4\.11[\.\-_].*jar +lib/jackson-core-asl-1\.9\.13[\.\-_].*jar +lib/paranamer-2\.3[\.\-_].*jar +lib/jersey-server-1\.9[\.\-_].*jar +lib/log4j-1\.2\.17[\.\-_].*jar +lib/hamcrest-core-1\.3[\.\-_].*jar +lib/xz-1\.0[\.\-_].*jar +lib/javax\.inject-1[\.\-_].*jar +lib/protobuf-java-2\.5\.0[\.\-_].*jar +lib/guice-3\.0[\.\-_].*jar +lib/guice-servlet-3\.0[\.\-_].*jar +hadoop-openstack-2\.7\.[0-9][\.\-_].*jar +commons-httpclient-3\.1[\.\-_].*jar +commons-collections-3\.2\.[12][\.\-_].*jar +commons-logging-1\.1\.3[\.\-_].*jar +commons-lang-2\.6[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list new file mode 100644 index 00000000..b0a5654d --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list @@ -0,0 +1,4 @@ +hadoop-annotations\.jar +hadoop-auth\.jar +hadoop-common\.jar +hadoop-nfs\.jar diff --git 
a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json new file mode 100644 index 00000000..6ad5f180 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-yarn-api","version":"2.7.3","classes":{"org.apache.hadoop.yarn.api.records.ApplicationAccessType":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","methods":{"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAccessType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","methods":{"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest newInstance(java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String 
getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","methods":{"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getContainerLaunchContext()":{"name":"getContainerLaunchContext","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest newInstance(org.apache.hadoop.yarn.api.records.ContainerLaunchContext, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"setContainerLaunchContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","methods":{"void setBlacklistAdditions(java.util.List)":{"name":"setBlacklistAdditions","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List 
getBlacklistRemovals()":{"name":"getBlacklistRemovals","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getBlacklistAdditions()":{"name":"getBlacklistAdditions","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest newInstance(java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":["java.util.List","java.util.List"],"exceptions":[]},"void setBlacklistRemovals(java.util.List)":{"name":"setBlacklistRemovals","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","methods":{"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; 
values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics":{"name":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":["int"],"exceptions":[]},"int getNumNodeManagers()":{"name":"getNumNodeManagers","returnType":"int","args":[],"exceptions":[]},"void setNumNodeManagers(int)":{"name":"setNumNodeManagers","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","methods":{"java.util.List getIncreaseRequests()":{"name":"getIncreaseRequests","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"void setAskList(java.util.List)":{"name":"setAskList","returnType":"void","args":["java.util.List"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"java.util.List getReleaseList()":{"name":"getReleaseList","returnType":"java.util.List","args":[],"exceptions":[]},"void 
setIncreaseRequests(java.util.List)":{"name":"setIncreaseRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest getResourceBlacklistRequest()":{"name":"getResourceBlacklistRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"void setResourceBlacklistRequest(org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"setResourceBlacklistRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"java.util.List getAskList()":{"name":"getAskList","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"void setReleaseList(java.util.List)":{"name":"setReleaseList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","methods":{"void setQueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"setQueueInfo","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse 
newInstance(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo()":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationReport","methods":{"void setApplicationResourceUsageReport(org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport)":{"name":"setApplicationResourceUsageReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport"],"exceptions":[]},"long getFinishTime()":{"name":"getFinishTime","returnType":"long","args":[],"exceptions":[]},"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport getApplicationResourceUsageReport()":{"name":"getApplicationResourceUsageReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":[],"exceptions":[]},"java.util.Set 
getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.ApplicationAttemptId, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, org.apache.hadoop.yarn.api.records.Token, org.apache.hadoop.yarn.api.records.YarnApplicationState, java.lang.String, java.lang.String, long, long, org.apache.hadoop.yarn.api.records.FinalApplicationStatus, org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport, java.lang.String, float, java.lang.String, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.ApplicationAttemptId","java.lang.String","java.lang.String","java.lang.String","java.lang.String","int","org.apache.hadoop.yarn.api.records.Token","org.apache.hadoop.yarn.api.records.YarnApplicationState","java.lang.String","java.lang.String","long","long","org.apache.hadoop.yarn.api.records.FinalApplicationStatus","org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","java.lang.String","float","java.lang.String","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getClientToAMToken()":{"name":"getClientToAMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void 
setYarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState)":{"name":"setYarnApplicationState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnApplicationState"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"void setStartTime(long)":{"name":"setStartTime","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOriginalTrackingUrl(java.lang.String)":{"name":"setOriginalTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState getYarnApplicationState()":{"name":"getYarnApplicationState","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setClientToAMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setClientToAMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getOriginalTrackingUrl()":{"name":"getOriginalTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFinishTime(long)":{"name":"setFinishTime","returnType":"void","args":["long"],"exceptions":[]},"void setCurrentApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"setCurrentApplicationAttemptId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId 
getCurrentApplicationAttemptId()":{"name":"getCurrentApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Resource":{"name":"org.apache.hadoop.yarn.api.records.Resource","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource newInstance(int, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":["int","int"],"exceptions":[]},"void setVirtualCores(int)":{"name":"setVirtualCores","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMemory(int)":{"name":"setMemory","returnType":"void","args":["int"],"exceptions":[]},"int getMemory()":{"name":"getMemory","returnType":"int","args":[],"exceptions":[]},"int getVirtualCores()":{"name":"getVirtualCores","returnType":"int","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeReport":{"name":"org.apache.hadoop.yarn.api.records.NodeReport","methods":{"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsed()":{"name":"getUsed","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"long getLastHealthReportTime()":{"name":"getLastHealthReportTime","returnType":"long","args":[],"exceptions":[]},"void 
setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void setNodeLabels(java.util.Set)":{"name":"setNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setHealthReport(java.lang.String)":{"name":"setHealthReport","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setRackName(java.lang.String)":{"name":"setRackName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setLastHealthReportTime(long)":{"name":"setLastHealthReportTime","returnType":"void","args":["long"],"exceptions":[]},"void setHttpAddress(java.lang.String)":{"name":"setHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRackName()":{"name":"getRackName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setUsed(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsed","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String getHealthReport()":{"name":"getHealthReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNodeState(org.apache.hadoop.yarn.api.records.NodeState)":{"name":"setNodeState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeState"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, 
long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeState getNodeState()":{"name":"getNodeState","returnType":"org.apache.hadoop.yarn.api.records.NodeState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long, java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long","java.util.Set"],"exceptions":[]},"java.lang.String getHttpAddress()":{"name":"getHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"java.util.Set getNodeLabels()":{"name":"getNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Priority":{"name":"org.apache.hadoop.yarn.api.records.Priority","methods":{"void setPriority(int)":{"name":"setPriority","returnType":"void","args":["int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.yarn.api.records.Priority)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":["int"],"exceptions":[]},"int getPriority()":{"name":"getPriority","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"int getAttemptId()":{"name":"getAttemptId","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":["org.apache.hadoop.yarn.api.records.ApplicationId","int"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NMToken":{"name":"org.apache.hadoop.yarn.api.records.NMToken","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getToken()":{"name":"getToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NMToken newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NMToken","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","methods":{"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus 
getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest newInstance(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest 
newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","methods":{"java.util.List getStartContainerRequests()":{"name":"getStartContainerRequests","returnType":"java.util.List","args":[],"exceptions":[]},"void setStartContainerRequests(java.util.List)":{"name":"setStartContainerRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext":{"name":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","methods":{"java.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.Map getServiceData()":{"name":"getServiceData","returnType":"java.util.Map","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.Map getLocalResources()":{"name":"getLocalResources","returnType":"java.util.Map","args":[],"exceptions":[]},"void setServiceData(java.util.Map)":{"name":"setServiceData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext newInstance(java.util.Map, java.util.Map, java.util.List, java.util.Map, java.nio.ByteBuffer, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":["java.util.Map","java.util.Map","java.util.List","java.util.Map","java.nio.ByteBuffer","java.util.Map"],"exceptions":[]},"java.util.Map 
getEnvironment()":{"name":"getEnvironment","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getCommands()":{"name":"getCommands","returnType":"java.util.List","args":[],"exceptions":[]},"java.nio.ByteBuffer getTokens()":{"name":"getTokens","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void setLocalResources(java.util.Map)":{"name":"setLocalResources","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setCommands(java.util.List)":{"name":"setCommands","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setTokens(java.nio.ByteBuffer)":{"name":"setTokens","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setEnvironment(java.util.Map)":{"name":"setEnvironment","returnType":"void","args":["java.util.Map"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","methods":{"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void 
setSuccessfullyStoppedContainers(java.util.List)":{"name":"setSuccessfullyStoppedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getSuccessfullyStoppedContainers()":{"name":"getSuccessfullyStoppedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueState":{"name":"org.apache.hadoop.yarn.api.records.QueueState","methods":{"org.apache.hadoop.yarn.api.records.QueueState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationId","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId newInstance(long, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["long","int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"long getClusterTimestamp()":{"name":"getClusterTimestamp","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int 
compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getClusterMetrics()":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse newInstance(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]},"void setClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"setClusterMetrics","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["java.util.List"],"exceptions":[]},"java.util.List getUserAclsInfoList()":{"name":"getUserAclsInfoList","returnType":"java.util.List","args":[],"exceptions":[]},"void 
setUserAclsInfoList(java.util.List)":{"name":"setUserAclsInfoList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus":{"name":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","methods":{"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","methods":{"java.util.List getApplicationList()":{"name":"getApplicationList","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","args":["java.util.List"],"exceptions":[]},"void setApplicationList(java.util.List)":{"name":"setApplicationList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationState","methods":{"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState 
valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.URL":{"name":"org.apache.hadoop.yarn.api.records.URL","methods":{"java.lang.String getFile()":{"name":"getFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPort(int)":{"name":"setPort","returnType":"void","args":["int"],"exceptions":[]},"void setUserInfo(java.lang.String)":{"name":"setUserInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setScheme(java.lang.String)":{"name":"setScheme","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFile(java.lang.String)":{"name":"setFile","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUserInfo()":{"name":"getUserInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL newInstance(java.lang.String, java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.URL","args":["java.lang.String","java.lang.String","int","java.lang.String"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","methods":{"void setIsUnregistered(boolean)":{"name":"setIsUnregistered","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse 
newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["boolean"],"exceptions":[]},"boolean getIsUnregistered()":{"name":"getIsUnregistered","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationMasterProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationMasterProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse finishApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"finishApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","methods":{"void setApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"setApplicationReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport()":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","methods":{"void 
setStartRange(long, long) throws java.lang.IllegalArgumentException":{"name":"setStartRange","returnType":"void","args":["long","long"],"exceptions":["java.lang.IllegalArgumentException"]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope getScope()":{"name":"getScope","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","args":[],"exceptions":[]},"void setScope(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"setScope","returnType":"void","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope, java.util.Set, java.util.Set, java.util.Set, java.util.Set, java.util.EnumSet, org.apache.commons.lang.math.LongRange, org.apache.commons.lang.math.LongRange, java.lang.Long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","java.util.Set","java.util.Set","java.util.Set","java.util.Set","java.util.EnumSet","org.apache.commons.lang.math.LongRange","org.apache.commons.lang.math.LongRange","java.lang.Long"],"exceptions":[]},"long getLimit()":{"name":"getLimit","returnType":"long","args":[],"exceptions":[]},"java.util.EnumSet 
getApplicationStates()":{"name":"getApplicationStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setFinishRange(org.apache.commons.lang.math.LongRange)":{"name":"setFinishRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"void setUsers(java.util.Set)":{"name":"setUsers","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getFinishRange()":{"name":"getFinishRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setApplicationStates(java.util.EnumSet)":{"name":"setApplicationStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.EnumSet"],"exceptions":[]},"java.util.Set getQueues()":{"name":"getQueues","returnType":"java.util.Set","args":[],"exceptions":[]},"java.util.Set getUsers()":{"name":"getUsers","returnType":"java.util.Set","args":[],"exceptions":[]},"void setLimit(long)":{"name":"setLimit","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set, java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set","java.util.EnumSet"],"exceptions":[]},"void setApplicationStates(java.util.Set)":{"name":"setApplicationStates","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getStartRange()":{"name":"getStartRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void 
setApplicationTypes(java.util.Set)":{"name":"setApplicationTypes","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setQueues(java.util.Set)":{"name":"setQueues","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setFinishRange(long, long)":{"name":"setFinishRange","returnType":"void","args":["long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set"],"exceptions":[]},"void setStartRange(org.apache.commons.lang.math.LongRange)":{"name":"setStartRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":[],"exceptions":[]},"java.util.Set getApplicationTypes()":{"name":"getApplicationTypes","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceType":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceType","methods":{"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","methods":{"long getVcoreSeconds()":{"name":"getVcoreSeconds","returnType":"long","args":[],"exceptions":[]},"int 
getNumUsedContainers()":{"name":"getNumUsedContainers","returnType":"int","args":[],"exceptions":[]},"long getMemorySeconds()":{"name":"getMemorySeconds","returnType":"long","args":[],"exceptions":[]},"void setMemorySeconds(long)":{"name":"setMemorySeconds","returnType":"void","args":["long"],"exceptions":[]},"void setUsedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setNeededResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setNeededResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getReservedResources()":{"name":"getReservedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getNeededResources()":{"name":"getNeededResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setNumUsedContainers(int)":{"name":"setNumUsedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setNumReservedContainers(int)":{"name":"setNumReservedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setVcoreSeconds(long)":{"name":"setVcoreSeconds","returnType":"void","args":["long"],"exceptions":[]},"int getNumReservedContainers()":{"name":"getNumReservedContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport newInstance(int, int, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, long, 
long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":["int","int","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsedResources()":{"name":"getUsedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setReservedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setReservedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest newInstance(java.lang.String, boolean, boolean, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","args":["java.lang.String","boolean","boolean","boolean"],"exceptions":[]},"boolean getRecursive()":{"name":"getRecursive","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setIncludeChildQueues(boolean)":{"name":"setIncludeChildQueues","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIncludeApplications()":{"name":"getIncludeApplications","returnType":"boolean","args":[],"exceptions":[]},"boolean getIncludeChildQueues()":{"name":"getIncludeChildQueues","returnType":"boolean","args":[],"exceptions":[]},"void setRecursive(boolean)":{"name":"setRecursive","returnType":"void","args":["boolean"],"exceptions":[]},"void 
setIncludeApplications(boolean)":{"name":"setIncludeApplications","returnType":"void","args":["boolean"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","methods":{"void setIncreasedContainers(java.util.List)":{"name":"setIncreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setDecreasedContainers(java.util.List)":{"name":"setDecreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","java.util.List","java.util.List"],"exceptions":[]},"void setUpdatedNodes(java.util.List)":{"name":"setUpdatedNodes","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"java.util.List getNMTokens()":{"name":"getNMTokens","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getUpdatedNodes()":{"name":"getUpdatedNodes","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getIncreasedContainers()":{"name":"getIncreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.AMCommand 
getAMCommand()":{"name":"getAMCommand","returnType":"org.apache.hadoop.yarn.api.records.AMCommand","args":[],"exceptions":[]},"void setNMTokens(java.util.List)":{"name":"setNMTokens","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setAMCommand(org.apache.hadoop.yarn.api.records.AMCommand)":{"name":"setAMCommand","returnType":"void","args":["org.apache.hadoop.yarn.api.records.AMCommand"],"exceptions":[]},"void setAllocatedContainers(java.util.List)":{"name":"setAllocatedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"int getNumClusterNodes()":{"name":"getNumClusterNodes","returnType":"int","args":[],"exceptions":[]},"void setNumClusterNodes(int)":{"name":"setNumClusterNodes","returnType":"void","args":["int"],"exceptions":[]},"void setCompletedContainersStatuses(java.util.List)":{"name":"setCompletedContainersStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.PreemptionMessage getPreemptionMessage()":{"name":"getPreemptionMessage","returnType":"org.apache.hadoop.yarn.api.records.PreemptionMessage","args":[],"exceptions":[]},"java.util.List getCompletedContainersStatuses()":{"name":"getCompletedContainersStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void 
setAvailableResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setAvailableResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, org.apache.hadoop.yarn.api.records.Token, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","org.apache.hadoop.yarn.api.records.Token","java.util.List","java.util.List"],"exceptions":[]},"void setPreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage)":{"name":"setPreemptionMessage","returnType":"void","args":["org.apache.hadoop.yarn.api.records.PreemptionMessage"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List"],"exceptions":[]},"java.util.List getAllocatedContainers()":{"name":"getAllocatedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"int 
getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"java.util.List getDecreasedContainers()":{"name":"getDecreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","methods":{"org.apache.hadoop.yarn.api.records.LocalResourceVisibility valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setUserAcls(java.util.List)":{"name":"setUserAcls","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getUserAcls()":{"name":"getUserAcls","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo newInstance(java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","args":["java.lang.String","java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","methods":{"void setRenewer(java.lang.String)":{"name":"setRenewer","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest 
newInstance(java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRenewer()":{"name":"getRenewer","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerState":{"name":"org.apache.hadoop.yarn.api.records.ContainerState","methods":{"org.apache.hadoop.yarn.api.records.ContainerState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.ContainerState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ContainerState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ContainerManagementProtocol":{"name":"org.apache.hadoop.yarn.api.ContainerManagementProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse getContainerStatuses(org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatuses","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse startContainers(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) 
throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse stopContainers(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"stopContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.exceptions.YarnException":{"name":"org.apache.hadoop.yarn.exceptions.YarnException","methods":{}},"org.apache.hadoop.yarn.api.records.QueueInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setCurrentCapacity(float)":{"name":"setCurrentCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setCapacity(float)":{"name":"setCapacity","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getChildQueues()":{"name":"getChildQueues","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo newInstance(java.lang.String, float, float, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.QueueState, java.util.Set, 
java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String","float","float","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.QueueState","java.util.Set","java.lang.String"],"exceptions":[]},"void setDefaultNodeLabelExpression(java.lang.String)":{"name":"setDefaultNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.util.List getApplications()":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":[]},"float getCapacity()":{"name":"getCapacity","returnType":"float","args":[],"exceptions":[]},"float getCurrentCapacity()":{"name":"getCurrentCapacity","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueState getQueueState()":{"name":"getQueueState","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":[],"exceptions":[]},"void setChildQueues(java.util.List)":{"name":"setChildQueues","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setApplications(java.util.List)":{"name":"setApplications","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getDefaultNodeLabelExpression()":{"name":"getDefaultNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumCapacity(float)":{"name":"setMaximumCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setQueueState(org.apache.hadoop.yarn.api.records.QueueState)":{"name":"setQueueState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueState"],"exceptions":[]},"void setAccessibleNodeLabels(java.util.Set)":{"name":"setAccessibleNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"float getMaximumCapacity()":{"name":"getMaximumCapacity","returnType":"float","args":[],"exceptions":[]},"java.util.Set 
getAccessibleNodeLabels()":{"name":"getAccessibleNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","methods":{"void setRMDelegationToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setRMDelegationToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse newInstance(org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken()":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeId":{"name":"org.apache.hadoop.yarn.api.records.NodeId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId newInstance(java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":["java.lang.String","int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int 
getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"void setApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"setApplicationSubmissionContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["boolean"],"exceptions":[]},"void setIsKillCompleted(boolean)":{"name":"setIsKillCompleted","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIsKillCompleted()":{"name":"getIsKillCompleted","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationClientProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationClientProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplication(org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest) throws 
org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNewApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse getClusterNodes(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse getLabelsToNodes(org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse 
moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse getQueueUserAcls(org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueUserAcls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse forceKillApplication(org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"forceKillApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse getNodeToLabels(org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse getQueueInfo(org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest) throws 
org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse getClusterMetrics(org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse getClusterNodeLabels(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","args":["java.util.List"],"exceptions":[]},"java.util.List 
getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerId":{"name":"org.apache.hadoop.yarn.api.records.ContainerId","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newInstance(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, 
int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","int"],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"long getContainerId()":{"name":"getContainerId","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newContainerId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, long)":{"name":"newContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getApplicationAttemptId()":{"name":"getApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId fromString(java.lang.String)":{"name":"fromString","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["java.lang.String"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Container":{"name":"org.apache.hadoop.yarn.api.records.Container","methods":{"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"java.lang.String getNodeHttpAddress()":{"name":"getNodeHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId 
getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getId()":{"name":"getId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Container newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Container","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void 
setId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNodeHttpAddress(java.lang.String)":{"name":"setNodeHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean","java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int"],"exceptions":[]},"void setRelaxLocality(boolean)":{"name":"setRelaxLocality","returnType":"void","args":["boolean"],"exceptions":[]},"void 
setResourceName(java.lang.String)":{"name":"setResourceName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean"],"exceptions":[]},"void setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getRelaxLocality()":{"name":"getRelaxLocality","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"java.lang.String getResourceName()":{"name":"getResourceName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"boolean 
isAnyLocation(java.lang.String)":{"name":"isAnyLocation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerStatus":{"name":"org.apache.hadoop.yarn.api.records.ContainerStatus","methods":{"org.apache.hadoop.yarn.api.records.ContainerState getState()":{"name":"getState","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getContainerId()":{"name":"getContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"int getExitStatus()":{"name":"getExitStatus","returnType":"int","args":[],"exceptions":[]},"void setExitStatus(int)":{"name":"setExitStatus","returnType":"void","args":["int"],"exceptions":[]},"void setState(org.apache.hadoop.yarn.api.records.ContainerState)":{"name":"setState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerState"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerStatus newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.ContainerState, java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.ContainerState","java.lang.String","int"],"exceptions":[]},"void setContainerId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setContainerId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"java.lang.String 
getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext":{"name":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","methods":{"void setMaxAppAttempts(int)":{"name":"setMaxAppAttempts","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ReservationId getReservationID()":{"name":"getReservationID","returnType":"org.apache.hadoop.yarn.api.records.ReservationId","args":[],"exceptions":[]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"boolean getUnmanagedAM()":{"name":"getUnmanagedAM","returnType":"boolean","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, java.lang.String, boolean, java.lang.String, org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","java.lang.String","boolean","java.lang.String","org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"void 
setUnmanagedAM(boolean)":{"name":"setUnmanagedAM","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, org.apache.hadoop.yarn.api.records.LogAggregationContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","org.apache.hadoop.yarn.api.records.LogAggregationContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void 
setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getAttemptFailuresValidityInterval()":{"name":"getAttemptFailuresValidityInterval","returnType":"long","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setAMContainerResourceRequest(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"setAMContainerResourceRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LogAggregationContext getLogAggregationContext()":{"name":"getLogAggregationContext","returnType":"org.apache.hadoop.yarn.api.records.LogAggregationContext","args":[],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void 
setLogAggregationContext(org.apache.hadoop.yarn.api.records.LogAggregationContext)":{"name":"setLogAggregationContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LogAggregationContext"],"exceptions":[]},"void setReservationID(org.apache.hadoop.yarn.api.records.ReservationId)":{"name":"setReservationID","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ReservationId"],"exceptions":[]},"void setApplicationName(java.lang.String)":{"name":"setApplicationName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean getCancelTokensWhenComplete()":{"name":"getCancelTokensWhenComplete","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest getAMContainerResourceRequest()":{"name":"getAMContainerResourceRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String 
getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"int getMaxAppAttempts()":{"name":"getMaxAppAttempts","returnType":"int","args":[],"exceptions":[]},"void setAttemptFailuresValidityInterval(long)":{"name":"setAttemptFailuresValidityInterval","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCancelTokensWhenComplete(boolean)":{"name":"setCancelTokensWhenComplete","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","java.lang.String","java.lang.String"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"boolean getKeepContainersAcrossApplicationAttempts()":{"name":"getKeepContainersAcrossApplicationAttempts","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, 
java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String"],"exceptions":[]},"void setAMContainerSpec(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"setAMContainerSpec","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getAMContainerSpec()":{"name":"getAMContainerSpec","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationName()":{"name":"getApplicationName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setKeepContainersAcrossApplicationAttempts(boolean)":{"name":"setKeepContainersAcrossApplicationAttempts","returnType":"void","args":["boolean"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse 
newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["java.util.List"],"exceptions":[]},"void setNodeReports(java.util.List)":{"name":"setNodeReports","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getNodeReports()":{"name":"getNodeReports","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Token":{"name":"org.apache.hadoop.yarn.api.records.Token","methods":{"org.apache.hadoop.yarn.api.records.Token newInstance([B, java.lang.String, [B, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["[B","java.lang.String","[B","java.lang.String"],"exceptions":[]},"void setIdentifier(java.nio.ByteBuffer)":{"name":"setIdentifier","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setPassword(java.nio.ByteBuffer)":{"name":"setPassword","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.lang.String getKind()":{"name":"getKind","returnType":"java.lang.String","args":[],"exceptions":[]},"void setKind(java.lang.String)":{"name":"setKind","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setService(java.lang.String)":{"name":"setService","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.nio.ByteBuffer getPassword()":{"name":"getPassword","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"java.nio.ByteBuffer getIdentifier()":{"name":"getIdentifier","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"java.lang.String getService()":{"name":"getService","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","methods":{"void 
setAllServicesMetaData(java.util.Map)":{"name":"setAllServicesMetaData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse newInstance(java.util.Map, java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["java.util.Map","java.util.List","java.util.Map"],"exceptions":[]},"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setSuccessfullyStartedContainers(java.util.List)":{"name":"setSuccessfullyStartedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.Map getAllServicesMetaData()":{"name":"getAllServicesMetaData","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getSuccessfullyStartedContainers()":{"name":"getSuccessfullyStartedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","methods":{"void setNMTokensFromPreviousAttempts(java.util.List)":{"name":"setNMTokensFromPreviousAttempts","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"void setSchedulerResourceTypes(java.util.EnumSet)":{"name":"setSchedulerResourceTypes","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"java.util.List getContainersFromPreviousAttempts()":{"name":"getContainersFromPreviousAttempts","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List 
getNMTokensFromPreviousAttempts()":{"name":"getNMTokensFromPreviousAttempts","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.EnumSet getSchedulerResourceTypes()":{"name":"getSchedulerResourceTypes","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse newInstance(org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, java.util.Map, java.nio.ByteBuffer, java.util.List, java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","java.util.Map","java.nio.ByteBuffer","java.util.List","java.lang.String","java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setClientToAMTokenMasterKey(java.nio.ByteBuffer)":{"name":"setClientToAMTokenMasterKey","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.nio.ByteBuffer getClientToAMTokenMasterKey()":{"name":"getClientToAMTokenMasterKey","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void 
setContainersFromPreviousAttempts(java.util.List)":{"name":"setContainersFromPreviousAttempts","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest$ResourceRequestComparator":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest$ResourceRequestComparator","methods":{"int compare(java.lang.Object, java.lang.Object)":{"name":"compare","returnType":"int","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"int compare(org.apache.hadoop.yarn.api.records.ResourceRequest, org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compare","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest","org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueACL":{"name":"org.apache.hadoop.yarn.api.records.QueueACL","methods":{"org.apache.hadoop.yarn.api.records.QueueACL valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueACL","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueACL; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueACL;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","args":[],"exceptions":[]},"void 
setNodeStates(java.util.EnumSet)":{"name":"setNodeStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"java.util.EnumSet getNodeStates()":{"name":"getNodeStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","methods":{"void setContainerStatuses(java.util.List)":{"name":"setContainerStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.List getContainerStatuses()":{"name":"getContainerStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResource":{"name":"org.apache.hadoop.yarn.api.records.LocalResource","methods":{"void setPattern(java.lang.String)":{"name":"setPattern","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility getVisibility()":{"name":"getVisibility","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, java.lang.String, 
boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","java.lang.String"],"exceptions":[]},"long getSize()":{"name":"getSize","returnType":"long","args":[],"exceptions":[]},"void setTimestamp(long)":{"name":"setTimestamp","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType getType()":{"name":"getType","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","boolean"],"exceptions":[]},"void setVisibility(org.apache.hadoop.yarn.api.records.LocalResourceVisibility)":{"name":"setVisibility","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LocalResourceVisibility"],"exceptions":[]},"boolean 
getShouldBeUploadedToSharedCache()":{"name":"getShouldBeUploadedToSharedCache","returnType":"boolean","args":[],"exceptions":[]},"void setType(org.apache.hadoop.yarn.api.records.LocalResourceType)":{"name":"setType","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LocalResourceType"],"exceptions":[]},"void setShouldBeUploadedToSharedCache(boolean)":{"name":"setShouldBeUploadedToSharedCache","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getPattern()":{"name":"getPattern","returnType":"java.lang.String","args":[],"exceptions":[]},"long getTimestamp()":{"name":"getTimestamp","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.URL","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long"],"exceptions":[]},"void setSize(long)":{"name":"setSize","returnType":"void","args":["long"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.URL)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.URL"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest","args":[],"exceptions":[]}}}}} \ No 
newline at end of file diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list new file mode 100644 index 00000000..d3861b97 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list @@ -0,0 +1,3 @@ +mapred +yarn +container-executor diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json new file mode 100644 index 00000000..f62ee8ed --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void 
moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics 
getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"void setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler 
getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws 
java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, 
org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}} \ No newline at end of file diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json new file mode 100644 index 00000000..b394bff9 --- /dev/null +++ 
b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json @@ -0,0 +1 @@ +{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}} \ No newline at end of file diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list new file mode 100644 index 00000000..26613d4e --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list @@ -0,0 +1,38 @@ +netty-3\.6\.2\.Final[\.\-_].*jar +leveldbjni-all-1\.8[\.\-_].*jar +jackson-core-asl-1\.9\.13[\.\-_].*jar 
+jackson-xc-1\.9\.13[\.\-_].*jar +jersey-server-1\.9[\.\-_].*jar +stax-api-1\.0-2[\.\-_].*jar +zookeeper-3\.4\.6[\.\-_].*jar +guice-3\.0[\.\-_].*jar +jaxb-impl-2\.2\.3-1[\.\-_].*jar +zookeeper-3\.4\.6.*-tests\.jar +jersey-client-1\.9[\.\-_].*jar +commons-cli-1\.2[\.\-_].*jar +log4j-1\.2\.17[\.\-_].*jar +jackson-mapper-asl-1\.9\.13[\.\-_].*jar +guava-11\.0\.2[\.\-_].*jar +jetty-6\.1\.26[\.\-_].*jar +commons-logging-1\.1\.3[\.\-_].*jar +jersey-core-1\.9[\.\-_].*jar +jersey-guice-1\.9[\.\-_].*jar +commons-compress-1\.4\.1[\.\-_].*jar +jettison-1\.1[\.\-_].*jar +commons-collections-3\.2\.[12][\.\-_].*jar +xz-1\.0[\.\-_].*jar +asm-3\.2[\.\-_].*jar +commons-codec-1\.4[\.\-_].*jar +aopalliance-1\.0[\.\-_].*jar +javax\.inject-1[\.\-_].*jar +commons-lang-2\.6[\.\-_].*jar +jetty-util-6\.1\.26[\.\-_].*jar +jsr305-3\.0\.0[\.\-_].*jar +protobuf-java-2\.5\.0[\.\-_].*jar +commons-io-2\.4[\.\-_].*jar +activation-1\.1[\.\-_].*jar +jersey-json-1\.9[\.\-_].*jar +jaxb-api-2\.2\.2[\.\-_].*jar +guice-servlet-3\.0[\.\-_].*jar +servlet-api-2\.5[\.\-_].*jar +jackson-jaxrs-1\.9\.13[\.\-_].*jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list new file mode 100644 index 00000000..bb880052 --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list @@ -0,0 +1,74 @@ +hadoop-yarn-server-sharedcachemanager.*\.jar +bin +bin/mapred +bin/container-executor +bin/yarn +sbin +sbin/yarn-daemon\.sh +sbin/yarn-daemons\.sh +hadoop-yarn-registry-2\.7\.[0-9].*\.jar +hadoop-yarn-applications-unmanaged-am-launcher-2\.7\.[0-9].*\.jar +hadoop-yarn-common-2\.7\.[0-9].*\.jar +hadoop-yarn-server-nodemanager.*\.jar +hadoop-yarn-server-applicationhistoryservice-2\.7\.[0-9].*\.jar +hadoop-yarn-server-common.*\.jar +etc +etc/hadoop +hadoop-yarn-server-common-2\.7\.[0-9].*\.jar +hadoop-yarn-server-tests.*\.jar +hadoop-yarn-server-resourcemanager.*\.jar 
+hadoop-yarn-server-web-proxy.*\.jar +hadoop-yarn-api-2\.7\.[0-9].*\.jar +hadoop-yarn-common.*\.jar +hadoop-yarn-server-web-proxy-2\.7\.[0-9].*\.jar +hadoop-yarn-applications-distributedshell-2\.7\.[0-9].*\.jar +hadoop-yarn-server-tests-2\.7\.[0-9].*\.jar +hadoop-yarn-server-resourcemanager-2\.7\.[0-9].*\.jar +hadoop-yarn-registry.*\.jar +hadoop-yarn-server-sharedcachemanager-2\.7\.[0-9].*\.jar +hadoop-yarn-client-2\.7\.[0-9].*\.jar +hadoop-yarn-applications-distributedshell.*\.jar +hadoop-yarn-server-nodemanager-2\.7\.[0-9].*\.jar +hadoop-yarn-api.*\.jar +hadoop-yarn-client.*\.jar +lib +lib/commons-cli-1\.2.*\.jar +lib/leveldbjni-all-1\.8.*\.jar +lib/jaxb-api-2\.2\.2.*\.jar +lib/jettison-1\.1.*\.jar +lib/commons-io-2\.4.*\.jar +lib/jetty-util-6\.1\.26.*\.jar +lib/jaxb-impl-2\.2\.3-1.*\.jar +lib/jersey-guice-1\.9.*\.jar +lib/netty-3\.6\.2\.Final.*\.jar +lib/jersey-core-1\.9.*\.jar +lib/jackson-mapper-asl-1\.9\.13.*\.jar +lib/asm-3\.2.*\.jar +lib/commons-compress-1\.4\.1.*\.jar +lib/aopalliance-1\.0.*\.jar +lib/jackson-xc-1\.9\.13.*\.jar +lib/jersey-json-1\.9.*\.jar +lib/commons-codec-1\.4.*\.jar +lib/jackson-core-asl-1\.9\.13.*\.jar +lib/servlet-api-2\.5.*\.jar +lib/jetty-6\.1\.26.*\.jar +lib/jersey-server-1\.9.*\.jar +lib/log4j-1\.2\.17.*\.jar +lib/zookeeper-3\.4\.6.*-tests\.jar +lib/stax-api-1\.0-2.*\.jar +lib/jersey-client-1\.9.*\.jar +lib/xz-1\.0.*\.jar +lib/zookeeper-3\.4\.6.*\.jar +lib/activation-1\.1.*\.jar +lib/javax\.inject-1.*\.jar +lib/protobuf-java-2\.5\.0.*\.jar +lib/guice-3\.0.*\.jar +lib/guava-11\.0\.2.*\.jar +lib/jsr305-3\.0\.0.*\.jar +lib/jackson-jaxrs-1\.9\.13.*\.jar +lib/commons-collections-3\.2\.[1-2].*\.jar +lib/commons-logging-1\.1\.3.*\.jar +lib/commons-lang-2\.6.*\.jar +lib/guice-servlet-3\.0.*\.jar +hadoop-yarn-server-applicationhistoryservice.*\.jar +hadoop-yarn-applications-unmanaged-am-launcher.*\.jar diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy 
b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy new file mode 100644 index 00000000..339de4cb --- /dev/null +++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy @@ -0,0 +1,430 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +specs { + tests { + 'HADOOP_EJH1' { + name = 'HADOOP_EJH1' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'JAVA_HOME' + } + } + 'HADOOP_EC1' { + name = 'HADOOP_EC1' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'HADOOP_TOOLS_PATH' + donotcheckexistance = true + } + } + 'HADOOP_EC2' { + name = 'HADOOP_EC2' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'HADOOP_COMMON_HOME' + } + } + 'HADOOP_EC3' { + name = 'HADOOP_EC3' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'HADOOP_COMMON_DIR' + relative = true + } + } + 'HADOOP_EC4' { + name = 'HADOOP_EC4' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'HADOOP_COMMON_LIB_JARS_DIR' + relative = true + } + } + 'HADOOP_EC5' { + name = 'HADOOP_EC5' + type = 'envdir' + arguments { + envcmd = 'hadoop envvars' + variable = 'HADOOP_CONF_DIR' + } + } + 'HADOOP_EH1' { + name = 'HADOOP_EH1' + type = 'envdir' + arguments { + envcmd = 'hdfs envvars' + variable = 'HADOOP_HDFS_HOME' + } + } + 'HADOOP_EH2' { + name = 'HADOOP_EH2' + type = 'envdir' + arguments { + envcmd = 'hdfs envvars' + variable = 'HDFS_DIR' + relative = true + } + } + 'HADOOP_EH3' { + name = 'HADOOP_EH3' + type = 'envdir' + arguments { + envcmd = 'hdfs envvars' + variable = 'HDFS_LIB_JARS_DIR' + relative = true + } + } + 'HADOOP_EY1' { + name = 'HADOOP_EY1' + type = 'envdir' + arguments { + envcmd = 'yarn envvars' + variable = 'HADOOP_YARN_HOME' + } + } + 'HADOOP_EY2' { + name = 'HADOOP_EY2' + type = 'envdir' + arguments { + envcmd = 'yarn envvars' + variable = 'YARN_DIR' + relative = true + } + } + 'HADOOP_EY3' { 
+ name = 'HADOOP_EY3' + type = 'envdir' + arguments { + envcmd = 'yarn envvars' + variable = 'YARN_LIB_JARS_DIR' + relative = true + } + } + 'HADOOP_EM1' { + name = 'HADOOP_EM1' + type = 'envdir' + arguments { + envcmd = 'mapred envvars' + variable = 'HADOOP_MAPRED_HOME' + } + } + 'HADOOP_EM2' { + name = 'HADOOP_EM2' + type = 'envdir' + arguments { + envcmd = 'mapred envvars' + variable = 'MAPRED_DIR' + relative = true + } + } + 'HADOOP_EM3' { + name = 'HADOOP_EM3' + type = 'envdir' + arguments { + envcmd = 'mapred envvars' + variable = 'MAPRED_LIB_JARS_DIR' + relative = true + } + } + 'HADOOP_EJH2_HADOOP' { + name = 'HADOOP_EJH2_HADOOP' + type = 'shell' + arguments { + command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/hadoop-env.sh' + message = 'JAVA_HOME is not set' + } + } + 'HADOOP_EJH2_YARN' { + name = 'HADOOP_EJH2_YARN' + type = 'shell' + arguments { + command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/yarn-env.sh' + message = 'JAVA_HOME is not set' + } + } + 'HADOOP_PLATVER_1' { + name = 'HADOOP_PLATVER' + type = 'shell' + arguments { + command = 'hadoop version | head -n 1 | grep -E \'Hadoop\\s+[0-9\\.]+[_\\-][A-Za-z_0-9]+\'' + message = 'Hadoop\'s version string is not correct' + } + } + 'HADOOP_DIRSTRUCT_COMMON' { + name = 'HADOOP_DIRSTRUCT_COMMON' + type = 'dirstruct' + arguments { + envcmd = 'hadoop envvars' + baseDirEnv = 'HADOOP_COMMON_HOME' + referenceList = 'hadoop-common.list' + } + } + 'HADOOP_DIRSTRUCT_HDFS' { + name = 'HADOOP_DIRSTRUCT_HDFS' + type = 'dirstruct' + arguments { + envcmd = 'hdfs envvars' + baseDirEnv = 'HADOOP_HDFS_HOME' + referenceList = 'hadoop-hdfs.list' + } + } + 'HADOOP_DIRSTRUCT_MAPRED' { + name = 'HADOOP_DIRSTRUCT_MAPRED' + type = 'dirstruct' + arguments { + envcmd = 'mapred envvars' + 
baseDirEnv = 'HADOOP_MAPRED_HOME' + referenceList = 'hadoop-mapreduce.list' + } + } + 'HADOOP_DIRSTRUCT_YARN' { + name = 'HADOOP_DIRSTRUCT_YARN' + type = 'dirstruct' + arguments { + envcmd = 'yarn envvars' + baseDirEnv = 'HADOOP_YARN_HOME' + referenceList = 'hadoop-yarn.list' + } + } + 'HADOOP_SUBPROJS' { + name = 'HADOOP_SUBPROJS' + type = 'dirstruct' + arguments { + envcmd = 'hadoop envvars' + baseDirEnv = 'HADOOP_COMMON_HOME' + referenceList = 'hadoop-subprojs.list' + } + } + 'HADOOP_BINCONTENT_COMMON' { + name = 'HADOOP_BINCONTENT_COMMON' + type = 'dirstruct' + arguments { + envcmd = 'hadoop envvars' + baseDirEnv = 'HADOOP_COMMON_HOME' + subDir = 'bin' + referenceList = 'hadoop-common-bin.list' + } + } + 'HADOOP_BINCONTENT_HDFS' { + name = 'HADOOP_BINCONTENT_HDFS' + type = 'dirstruct' + arguments { + envcmd = 'hdfs envvars' + baseDirEnv = 'HADOOP_HDFS_HOME' + subDir = 'bin' + referenceList = 'hadoop-hdfs-bin.list' + } + } + 'HADOOP_BINCONTENT_MAPRED' { + name = 'HADOOP_BINCONTENT_MAPRED' + type = 'dirstruct' + arguments { + envcmd = 'mapred envvars' + baseDirEnv = 'HADOOP_MAPRED_HOME' + subDir = 'bin' + referenceList = 'hadoop-mapreduce-bin.list' + } + } + 'HADOOP_BINCONTENT_YARN' { + name = 'HADOOP_BINCONTENT_YARN' + type = 'dirstruct' + arguments { + envcmd = 'yarn envvars' + baseDirEnv = 'HADOOP_YARN_HOME' + subDir = 'bin' + referenceList = 'hadoop-yarn-bin.list' + } + } + 'HADOOP_LIBJARSCONTENT_COMMON' { + name = 'HADOOP_JARCONTENT_COMMON' + type = 'dirstruct' + arguments { + envcmd = 'hadoop envvars' + baseDirEnv = 'HADOOP_COMMON_HOME' + subDirEnv = 'HADOOP_COMMON_LIB_JARS_DIR' + referenceList = 'hadoop-common-jar.list' + } + } + 'HADOOP_LIBJARSCONTENT_HDFS' { + name = 'HADOOP_JARCONTENT_HDFS' + type = 'dirstruct' + arguments { + envcmd = 'hdfs envvars' + baseDirEnv = 'HADOOP_HDFS_HOME' + subDirEnv = 'HDFS_LIB_JARS_DIR' + referenceList = 'hadoop-hdfs-jar.list' + } + } + 'HADOOP_LIBJARSCONTENT_MAPRED' { + name = 'HADOOP_JARCONTENT_MAPRED' + type = 
'dirstruct' + arguments { + envcmd = 'mapred envvars' + baseDirEnv = 'HADOOP_MAPRED_HOME' + subDirEnv = 'MAPRED_LIB_JARS_DIR' + referenceList = 'hadoop-mapreduce-jar.list' + } + } + 'HADOOP_LIBJARSCONTENT_YARN' { + name = 'HADOOP_JARCONTENT_YARN' + type = 'dirstruct' + arguments { + envcmd = 'yarn envvars' + baseDirEnv = 'HADOOP_YARN_HOME' + subDirEnv = 'YARN_LIB_JARS_DIR' + referenceList = 'hadoop-yarn-jar.list' + } + } + 'HADOOP_GETCONF' { + name = 'HADOOP_GETCONF' + type = 'shell' + arguments { + command = '[ `hdfs getconf -confKey dfs.permissions.superusergroup >/dev/null 2>/dev/null; echo $?` == "0" ]' + message = 'It\' not possible to to determine key Hadoop configuration values by using ${HADOOP_HDFS_HOME}/bin/hdfs getconf' + } + } + 'HADOOP_CNATIVE1' { + name = 'HADOOP_CNATIVE1' + type = 'shell' + arguments { + command = 'hadoop checknative -a 2>/dev/null | grep hadoop | grep true' + message = 'hadoop-common-project must be build with -Pnative or -Pnative-win' + } + } + 'HADOOP_CNATIVE2' { + name = 'HADOOP_CNATIVE2' + type = 'shell' + arguments { + command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true' + message = 'hadoop-common-project must be build with -Prequire.snappy' + } + } + 'HADOOP_HNATIVE1' { + name = 'HADOOP_HNATIVE1' + type = 'shell' + arguments { + command = '[ ! -n ${HADOOP_COMMON_HOME} ] || HADOOP_COMMON_HOME=`hadoop envvars | grep HADOOP_COMMON_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+ + 'test -e $HADOOP_COMMON_HOME/lib/native/libhdfs.a' + message = 'hadoop-hdfs-project must be build with -Pnative or -Pnative-win' + } + } + 'HADOOP_YNATIVE1' { + name = 'HADOOP_YNATIVE1' + type = 'shell' + arguments { + command = '[ ! 
-n ${HADOOP_YARN_HOME} ] || HADOOP_YARN_HOME=`yarn envvars | grep HADOOP_YARN_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+ + 'echo $HADOOP_YARN_HOME; test -e $HADOOP_YARN_HOME/bin/container-executor' + message = 'hadoop-yarn-project must be build with -Pnative or -Pnative-win' + } + } + 'HADOOP_MNATIVE1' { + name = 'HADOOP_MNATIVE1' + type = 'shell' + arguments { + command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true' + message = 'hadoop-mapreduce-project must be build with -Prequire.snappy' + } + } + 'HADOOP_COMPRESSION' { + name = 'HADOOP_COMPRESSION' + type = 'shell' + arguments { + command = '[[ "$(hadoop checknative -a 2>/dev/null | egrep -e ^zlib -e ^snappy | sort -u | grep true | wc -l)" == 2 ]]' + message = 'hadoop must be built with -Dcompile.native=true' + } + } + 'HADOOP_TOOLS' { + name = 'HADOOP_TOOLS' + type = 'hadoop_tools' + arguments { + } + } + 'HADOOP_API1' { + name = "HADOOP_API1" + type = 'api_examination' + arguments { + baseDirEnv = 'HADOOP_COMMON_HOME' + libDir = 'HADOOP_COMMON_DIR' + envcmd = 'hadoop envvars' + jar = 'hadoop-common' + resourceFile = 'hadoop-common-2.7.3-api-report.json' + } + } + 'HADOOP_API2' { + name = "HADOOP_API2" + type = 'api_examination' + arguments { + baseDirEnv = 'HADOOP_HDFS_HOME' + libDir = 'HDFS_DIR' + envcmd = 'hdfs envvars' + jar = 'hadoop-hdfs' + resourceFile = 'hadoop-hdfs-2.7.3-api-report.json' + } + } + 'HADOOP_API3' { + name = "HADOOP_API3" + type = 'api_examination' + arguments { + baseDirEnv = 'HADOOP_YARN_HOME' + libDir = 'YARN_DIR' + envcmd = 'yarn envvars' + jar = 'hadoop-yarn-common' + resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json' + } + } + 'HADOOP_API4' { + name = "HADOOP_API4" + type = 'api_examination' + arguments { + baseDirEnv = 'HADOOP_YARN_HOME' + libDir = 'YARN_DIR' + envcmd = 'yarn envvars' + jar = 'hadoop-yarn-client' + resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json' + } + } + 'HADOOP_API5' { + name = "HADOOP_API5" + type = 'api_examination' + arguments 
{ + baseDirEnv = 'HADOOP_YARN_HOME' + libDir = 'YARN_DIR' + envcmd = 'yarn envvars' + jar = 'hadoop-yarn-api' + resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json' + } + } + 'HADOOP_API6' { + name = "HADOOP_API6" + type = 'api_examination' + arguments { + baseDirEnv = 'HADOOP_MAPRED_HOME' + libDir = 'MAPRED_DIR' + envcmd = 'mapred envvars' + jar = 'hadoop-mapreduce-client-core' + resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json' + } + } + } +} diff --git a/bigtop-tests/spec-tests/README.md b/bigtop-tests/spec-tests/README.md deleted file mode 100644 index 8fde997f..00000000 --- a/bigtop-tests/spec-tests/README.md +++ /dev/null @@ -1,48 +0,0 @@ -Licensed to the Apache Software Foundation (ASF) under one or more -contributor license agreements. See the NOTICE file distributed with -this work for additional information regarding copyright ownership. -The ASF licenses this file to You under the Apache License, Version 2.0 -(the "License"); you may not use this file except in compliance with -the License. You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -Test suite to validate Hadoop basic specifications -================================================== - -The test suite is intended to be used as a validation tool to make sure that a -Hadoop stack derived from Apache Bigtop is still compliant with it. The -minimalistic way of doing so would be to guarantee compatibility of the -environment, binaries layouts, certain configuration parameters, and so on. 
- -Validation test suite for the specs is vaguely based on Apache Bigtop iTest and -consists of two essential parts: a configuration file, communicating the -functional commands and expected outcome(s) of it; and the test driver to run -the commands and compare the results. - -Running the tests -================= - -Tests could be executed by running the following command -``` - gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info -``` -======= -consists of two essential parts: a configuration file, communicating the -functional commands and expected outcome(s) of it; and the test driver to run -the commands and compare the results. - -Running the tests -================= - -Tests could be executed by running the following command -``` - gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info -``` - diff --git a/bigtop-tests/spec-tests/build.gradle b/bigtop-tests/spec-tests/build.gradle deleted file mode 100644 index b0a67150..00000000 --- a/bigtop-tests/spec-tests/build.gradle +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -subprojects { - /** - * Utility function for tests to use to confirm EVN Variables. - */ - ext.checkEnv = { env_vars -> - env_vars.each() { - def value = System.getenv("${it}") - if (value == null || value == "null") - throw new GradleException("undeclared env variable: ${it}") - } - } - - ext.groovyVersion = '1.8.0' - ext.hadoopVersion = '2.6.0' - // itest needs be greater than or equal to = 1.0.0 - ext.itestVersion = '1.0.0' // Might need to be able to read an input for alternate version? - ext.BIGTOP_HOME = rootDir - - dependencies { - compile group: 'org.apache.bigtop.itest', name: 'itest-common', version: itestVersion, transitive: 'true' - //needed to avoid groovy not on classpath error. - testCompile group: 'org.codehaus.groovy', name: 'groovy', version: groovyVersion - testRuntime project(':bigtop-tests:smoke-tests:logger-test-config') - } - - test.doFirst { - // TestHadoopExamples and other tests rely on BIGTOP_HOME environment - // variable to find some resources. Let's set it up, using ext.BIGTOP_HOME - environment ("BIGTOP_HOME", BIGTOP_HOME) - } - - test << { - println("Now testing..."); - //todo, add back in 'basic' after BIGTOP-1392 . 
- testLogging { - events "passed", "skipped", "failed" - } - } - - // Let's make sure all system Properties are passed into the forked test JVM - tasks.withType(Test) { - systemProperties = System.getProperties() - } - test.dependsOn compileGroovy - compileGroovy.dependsOn clean -} diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle deleted file mode 100644 index 97e36353..00000000 --- a/bigtop-tests/spec-tests/runtime/build.gradle +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -def junitVersion = '4.11' - -apply plugin: 'java' - -repositories { - maven { - url "http://conjars.org/repo/" - } -} -dependencies { - compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true' - compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3' - compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3' - compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1' - compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1' - compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1' - compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3' - compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3' - compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2' - compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1' - testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2' - compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2' - testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2' - testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2' - testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1' - testCompile "junit:junit:4.11" - if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR) -} - -jar { - from { - (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect { - zipTree(it) - } - } - - exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA' 
-} - -test { - // Change the default location where test data is picked up - systemProperty 'test.resources.dir', "${buildDir}/resources/test/" - systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath - systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ } -} -test.dependsOn jar diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java deleted file mode 100644 index d95c010d..00000000 --- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java +++ /dev/null @@ -1,485 +0,0 @@ -/** - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hadoop; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.map.ObjectMapper; - -import java.io.File; -import java.io.IOException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * A tool that generates API conformance tests for Hadoop libraries - */ -public class ApiExaminer { - - private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName()); - - static private Set unloadableClasses; - - private List errors; - private List warnings; - - static { - unloadableClasses = new HashSet<>(); - unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping"); - unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping"); - unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor"); - unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask"); - - } - - public static void main(String[] args) { - Options options = new Options(); - - 
options.addOption("c", "compare", true, - "Compare against a spec, argument is the json file containing spec"); - options.addOption("h", "help", false, "You're looking at it"); - options.addOption("j", "jar", true, "Jar to examine"); - options.addOption("p", "prepare-spec", true, - "Prepare the spec, argument is the directory to write the spec to"); - - try { - CommandLine cli = new GnuParser().parse(options, args); - - if (cli.hasOption('h')) { - usage(options); - return; - } - - if ((!cli.hasOption('c') && !cli.hasOption('p')) || - (cli.hasOption('c') && cli.hasOption('p'))) { - System.err.println("You must choose either -c or -p"); - usage(options); - return; - } - - if (!cli.hasOption('j')) { - System.err.println("You must specify the jar to prepare or compare"); - usage(options); - return; - } - - String jar = cli.getOptionValue('j'); - ApiExaminer examiner = new ApiExaminer(); - - if (cli.hasOption('c')) { - examiner.compareAgainstStandard(cli.getOptionValue('c'), jar); - } else if (cli.hasOption('p')) { - examiner.prepareExpected(jar, cli.getOptionValue('p')); - } - } catch (Exception e) { - System.err.println("Received exception while processing"); - e.printStackTrace(); - } - } - - private static void usage(Options options) { - HelpFormatter help = new HelpFormatter(); - help.printHelp("api-examiner", options); - - } - - private ApiExaminer() { - } - - private void prepareExpected(String jarFile, String outputDir) throws IOException, - ClassNotFoundException { - JarInfo jarInfo = new JarInfo(jarFile, this); - jarInfo.dumpToFile(new File(outputDir)); - } - - private void compareAgainstStandard(String json, String jarFile) throws IOException, - ClassNotFoundException { - errors = new ArrayList<>(); - warnings = new ArrayList<>(); - JarInfo underTest = new JarInfo(jarFile, this); - JarInfo standard = jarInfoFromFile(new File(json)); - standard.compareAndReport(underTest); - - if (errors.size() > 0) { - System.err.println("Found " + errors.size() + " 
incompatibilities:"); - for (String error : errors) { - System.err.println(error); - } - } - - if (warnings.size() > 0) { - System.err.println("Found " + warnings.size() + " possible issues: "); - for (String warning : warnings) { - System.err.println(warning); - } - } - - - } - - private JarInfo jarInfoFromFile(File inputFile) throws IOException { - ObjectMapper mapper = new ObjectMapper(); - JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class); - jarInfo.patchUpClassBackPointers(this); - return jarInfo; - } - - private static class JarInfo { - String name; - String version; - ApiExaminer container; - Map classes; - - // For use by Jackson - public JarInfo() { - - } - - JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException { - this.container = container; - LOG.info("Processing jar " + jarFile); - File f = new File(jarFile); - Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*"); - Matcher matcher = pattern.matcher(f.getName()); - if (!matcher.matches()) { - String msg = "Unable to determine name and version from " + f.getName(); - LOG.error(msg); - throw new RuntimeException(msg); - } - name = matcher.group(1); - version = matcher.group(2); - classes = new HashMap<>(); - - JarFile jar = new JarFile(jarFile); - Enumeration entries = jar.entries(); - while (entries.hasMoreElements()) { - String name = entries.nextElement().getName(); - if (name.endsWith(".class")) { - name = name.substring(0, name.length() - 6); - name = name.replace('/', '.'); - if (!unloadableClasses.contains(name)) { - LOG.debug("Processing class " + name); - Class clazz = Class.forName(name); - if (clazz.getAnnotation(InterfaceAudience.Public.class) != null && - clazz.getAnnotation(InterfaceStability.Stable.class) != null) { - classes.put(name, new ClassInfo(this, clazz)); - } - } - } - } - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String 
getVersion() { - return version; - } - - public void setVersion(String version) { - this.version = version; - } - - public Map getClasses() { - return classes; - } - - public void setClasses(Map classes) { - this.classes = classes; - } - - void compareAndReport(JarInfo underTest) { - Set underTestClasses = new HashSet<>(underTest.classes.values()); - for (ClassInfo classInfo : classes.values()) { - if (underTestClasses.contains(classInfo)) { - classInfo.compareAndReport(underTest.classes.get(classInfo.name)); - underTestClasses.remove(classInfo); - } else { - container.errors.add(underTest + " does not contain class " + classInfo); - } - } - - if (underTestClasses.size() > 0) { - for (ClassInfo extra : underTestClasses) { - container.warnings.add(underTest + " contains extra class " + extra); - } - } - } - - void dumpToFile(File outputDir) throws IOException { - File output = new File(outputDir, name + "-" + version + "-api-report.json"); - ObjectMapper mapper = new ObjectMapper(); - mapper.writeValue(output, this); - } - - void patchUpClassBackPointers(ApiExaminer container) { - this.container = container; - for (ClassInfo classInfo : classes.values()) { - classInfo.setJar(this); - classInfo.patchUpBackMethodBackPointers(); - } - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof JarInfo)) return false; - JarInfo that = (JarInfo)other; - return name.equals(that.name) && version.equals(that.version); - } - - @Override - public String toString() { - return name + "-" + version; - } - } - - private static class ClassInfo { - @JsonIgnore JarInfo jar; - String name; - Map methods; - - // For use by Jackson - public ClassInfo() { - - } - - ClassInfo(JarInfo jar, Class clazz) { - this.jar = jar; - this.name = clazz.getName(); - methods = new HashMap<>(); - - for (Method method : clazz.getMethods()) { - if (method.getDeclaringClass().equals(clazz)) { - LOG.debug("Processing method " + method.getName()); - MethodInfo mi = new MethodInfo(this, 
method); - methods.put(mi.toString(), mi); - } - } - } - - public JarInfo getJar() { - return jar; - } - - public void setJar(JarInfo jar) { - this.jar = jar; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Map getMethods() { - return methods; - } - - public void setMethods(Map methods) { - this.methods = methods; - } - - void compareAndReport(ClassInfo underTest) { - // Make a copy so we can remove them as we match them, making it easy to find additional ones - Set underTestMethods = new HashSet<>(underTest.methods.values()); - for (MethodInfo methodInfo : methods.values()) { - if (underTestMethods.contains(methodInfo)) { - methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString())); - underTestMethods.remove(methodInfo); - } else { - jar.container.errors.add(underTest + " does not contain method " + methodInfo); - } - } - - if (underTestMethods.size() > 0) { - for (MethodInfo extra : underTestMethods) { - jar.container.warnings.add(underTest + " contains extra method " + extra); - } - } - } - - void patchUpBackMethodBackPointers() { - for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof ClassInfo)) return false; - ClassInfo that = (ClassInfo)other; - return name.equals(that.name); // Classes can be compared just on names - } - - @Override - public int hashCode() { - return name.hashCode(); - } - - @Override - public String toString() { - return jar + " " + name; - } - } - - private static class MethodInfo { - @JsonIgnore ClassInfo containingClass; - String name; - String returnType; - List args; - Set exceptions; - - // For use by Jackson - public MethodInfo() { - - } - - MethodInfo(ClassInfo containingClass, Method method) { - this.containingClass = containingClass; - this.name = method.getName(); - args = new ArrayList<>(); - for (Class argClass : 
method.getParameterTypes()) { - args.add(argClass.getName()); - } - returnType = method.getReturnType().getName(); - exceptions = new HashSet<>(); - for (Class exception : method.getExceptionTypes()) { - exceptions.add(exception.getName()); - } - } - - public ClassInfo getContainingClass() { - return containingClass; - } - - public void setContainingClass(ClassInfo containingClass) { - this.containingClass = containingClass; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getReturnType() { - return returnType; - } - - public void setReturnType(String returnType) { - this.returnType = returnType; - } - - public List getArgs() { - return args; - } - - public void setArgs(List args) { - this.args = args; - } - - public Set getExceptions() { - return exceptions; - } - - public void setExceptions(Set exceptions) { - this.exceptions = exceptions; - } - - void compareAndReport(MethodInfo underTest) { - // Check to see if they've added or removed exceptions - // Make a copy so I can remove them as I check them off and easily find any that have been - // added. - Set underTestExceptions = new HashSet<>(underTest.exceptions); - for (String exception : exceptions) { - if (underTest.exceptions.contains(exception)) { - underTestExceptions.remove(exception); - } else { - containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " + - underTest.containingClass + "." + name + " removes exception " + exception); - } - } - if (underTestExceptions.size() > 0) { - for (String underTestException : underTest.exceptions) { - containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " + - underTest.containingClass + "." 
+ name + " adds exception " + underTestException); - } - } - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof MethodInfo)) return false; - MethodInfo that = (MethodInfo)other; - - return containingClass.equals(that.containingClass) && name.equals(that.name) && - returnType.equals(that.returnType) && args.equals(that.args); - } - - @Override - public int hashCode() { - return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 + - args.hashCode(); - } - - @Override - public String toString() { - StringBuilder buf = new StringBuilder(returnType) - .append(" ") - .append(name) - .append('('); - boolean first = true; - for (String arg : args) { - if (first) first = false; - else buf.append(", "); - buf.append(arg); - } - buf.append(")"); - if (exceptions.size() > 0) { - buf.append(" throws "); - first = true; - for (String exception : exceptions) { - if (first) first = false; - else buf.append(", "); - buf.append(exception); - } - } - return buf.toString(); - } - } -} diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java deleted file mode 100644 index 4110d5d6..00000000 --- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.WritableComparable; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.GenericOptionsParser; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.hive.hcatalog.data.DefaultHCatRecord; -import org.apache.hive.hcatalog.data.HCatRecord; -import org.apache.hive.hcatalog.data.schema.HCatSchema; -import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils; -import org.apache.hive.hcatalog.mapreduce.HCatInputFormat; -import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat; -import org.apache.hive.hcatalog.mapreduce.OutputJobInfo; - -import java.io.IOException; -import java.net.URI; -import java.util.StringTokenizer; - -public class HCatalogMR extends Configured implements Tool { - private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input"; - private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output"; - - @Override - public int run(String[] args) throws Exception { - String inputTable = null; - String outputTable = null; - String inputSchemaStr = null; - String outputSchemaStr = null; - for(int i = 0; i < args.length; i++){ - if(args[i].equalsIgnoreCase("-it")){ - inputTable = args[i+1]; - }else if(args[i].equalsIgnoreCase("-ot")){ - outputTable = args[i+1]; - }else if(args[i].equalsIgnoreCase("-is")){ - inputSchemaStr = args[i+1]; 
- }else if(args[i].equalsIgnoreCase("-os")){ - outputSchemaStr = args[i+1]; - } - } - - Configuration conf = getConf(); - args = new GenericOptionsParser(conf, args).getRemainingArgs(); - - conf.set(INPUT_SCHEMA, inputSchemaStr); - conf.set(OUTPUT_SCHEMA, outputSchemaStr); - - Job job = new Job(conf, "odpi_hcat_test"); - HCatInputFormat.setInput(job, "default", inputTable); - - job.setInputFormatClass(HCatInputFormat.class); - job.setJarByClass(HCatalogMR.class); - job.setMapperClass(Map.class); - job.setReducerClass(Reduce.class); - job.setMapOutputKeyClass(Text.class); - job.setMapOutputValueClass(IntWritable.class); - job.setOutputKeyClass(WritableComparable.class); - job.setOutputValueClass(HCatRecord.class); - HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null)); - HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr)); - job.setOutputFormatClass(HCatOutputFormat.class); - - return job.waitForCompletion(true) ? 0 : 1; - - - } - public static class Map extends Mapper { - private final static IntWritable one = new IntWritable(1); - private Text word = new Text(); - private HCatSchema inputSchema = null; - - @Override - protected void map(WritableComparable key, HCatRecord value, Context context) - throws IOException, InterruptedException { - if (inputSchema == null) { - inputSchema = - HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA)); - } - String line = value.getString("line", inputSchema); - StringTokenizer tokenizer = new StringTokenizer(line); - while (tokenizer.hasMoreTokens()) { - word.set(tokenizer.nextToken()); - context.write(word, one); - } - } - } - - public static class Reduce extends Reducer { - private HCatSchema outputSchema = null; - - @Override - protected void reduce(Text key, Iterable values, Context context) throws - IOException, InterruptedException { - if (outputSchema == null) { - outputSchema = - 
HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA)); - } - int sum = 0; - for (IntWritable i : values) { - sum += i.get(); - } - HCatRecord output = new DefaultHCatRecord(2); - output.set("word", outputSchema, key); - output.set("count", outputSchema, sum); - context.write(null, output); - } - } - - public static void main(String[] args) throws Exception { - int exitCode = ToolRunner.run(new HCatalogMR(), args); - System.exit(exitCode); - } - } diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh deleted file mode 100755 index 8c9ab5e6..00000000 --- a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env bash - -############################################################################ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -############################################################################ - -############################################################################ -# This script is used to generate the hadoop-*-api.report.json files in the -# test/resources directory. To use it, you will first need to download an -# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the -# directory where you untar that distribution. You will then need to set -# BIGTTOP_HOME to the directory where your bigtop source is located. 
Then -# run this script for each of the jars you want to generate a report for. -# The arguments passed to this script should be -p -j -# where outputdir is the directory you'd like to write the report to and -# jarfile is the full path of the jar to generate the report for. Reports -# should be generated for the following jars: hadoop-common, hadoop-hdfs, -# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and -# hadoop-mapreduce-client-core -# -# Example usage: -# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3 -# export BIGTOP_HOME=/home/me/git/bigtop -# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner.sh -j $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources -# -# The resulting reports should be committed to git. This script only needs -# to be run once per ODPi release. -############################################################################ - - -if [ "x${APACHE_HADOOP_DIR}" = "x" ] -then - echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in" - exit 1 -fi - -if [ "x${BIGTOP_HOME}" = "x" ] -then - echo "You must set BIGTOP_HOME to the root directory for your bigtop source" - exit 1 -fi - -for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar` -do - CLASSPATH=$CLASSPATH:$jar -done - -for jar in `find $APACHE_HADOOP_DIR -name \*.jar` -do - CLASSPATH=$CLASSPATH:$jar -done - -java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@ - diff --git a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy deleted file mode 100644 index bc2a3b20..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy +++ /dev/null @@ -1,275 +0,0 @@ -/** - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime - -import groovy.io.FileType -import org.junit.Assert -import org.apache.bigtop.itest.shell.* -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.Parameterized -import org.junit.runners.Parameterized.Parameters - -import java.util.regex.Matcher -import java.util.regex.Pattern - -/** - * Check all expected environment - * Tests are constructed dynamically, using external DSL to define - * - test name - * - test type - * - command to execute the test - * - expected pattern of the output - */ -@RunWith(Parameterized.class) -public class TestSpecsRuntime { - private String testName - private String type - private Map arguments - - private static ENV = System.getenv() - - @Parameters(name="{0}") - public static Collection allTests() { - List specs = []; - - config.specs.tests.each { test -> - specs.add([test.value.name, test.value.type, test.value.arguments] as Object[]) - } - return specs - } - - public TestSpecsRuntime (String testName, String type, Map arguments) { - this.testName = testName - this.type = type - this.arguments = arguments - } - - public static final String testsList = System.properties['test.resources.dir'] ?: - "${System.properties['buildDir']}/resources/test" - def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}")) - - private static String getTestConfigName() { - return "$testsList/testRuntimeSpecConf.groovy"; - } - - private Map getEnvMap(String command) { - def envMap = [:] - Shell sh = new Shell() - def envvars = sh.exec(command).getOut() - if (sh.getRet() == 0) { - envvars.each { - def match = it =~ /(?[^=]+)='(?[^']+)'$/ 
- if ( match.matches() ) { - envMap[match.group('variable')] = match.group('value') - } - } - } - return envMap - } - - private String getEnv(String name, String cmd) { - String value = ENV[name] - if (value == null) { - value = getEnvMap(cmd)[name] - } - return value - } - - @Test - public void testAll() { - switch (type) { - case 'shell': - Shell sh = new Shell() - def output = sh.exec(arguments['command']).getOut().join("\n") - int actualResult = sh.getRet() - int expectedResult = arguments['expectedResult'] ? arguments['expectedResult'] : 0 // use 0 as default success code - Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}", - actualResult == expectedResult) - break - - case 'envdir': - def var = arguments['variable'] - def isPathRelative = arguments['relative'] - def pathString = getEnv(var, arguments['envcmd']) - Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null ) - - if ( arguments['pattern'] ) { - Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern", - pathString ==~ /${arguments['pattern']}/) - } - - def pathFile = new File(pathString) - if ( isPathRelative ) { - Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() ) - } else { - if (!arguments['donotcheckexistance']) { - Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() ) - Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() ) - } - } - break - - case 'dirstruct': - def expectedFiles = [] - new File("${testsList}", "${arguments['referenceList']}").eachLine { line -> - expectedFiles << ~line - } - def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd']) - Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue", - baseDirEnv) - def root = new File(baseDirEnv) - def actualFiles = [] - def 
missingFiles = [] - if ( ! root.exists() ) { - Assert.assertFail("${testName} fail: ${baseDirEnv} does not exist!"); - } - - root.eachFileRecurse(FileType.ANY) { file -> - def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path - actualFiles << relPath - } - - expectedFiles.each { wantFile -> - def ok = false - for (def x : actualFiles) { - if (actualFiles =~ wantFile) { - ok = true - break - } - } - if (!ok) { - missingFiles << wantFile - } - } - - Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ", - missingFiles.size() == 0) - break - - case 'dircontent': - def expectedFiles = [] - new File("${testsList}", "${arguments['referenceList']}").eachLine { line -> - expectedFiles << ~line - } - - def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd']) - def subDir = arguments['subDir'] - if (!subDir && arguments['subDirEnv']) { - subDir = getEnv(arguments['subDirEnv'], arguments['envcmd']) - } - - def dir = null - if (subDir) { - dir = new File(baseDir, subDir) - } else { - dir = new File(baseDir) - } - Assert.assertNotNull("Directory has to be set for the test to continue", dir) - - def actualFiles = [] - if (dir.exists()) { - dir.eachFile FileType.FILES, { file -> - def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path - actualFiles << relPath - } - } - - def missingList = [] - for (def wantFile : expectedFiles) { - def ok = false - for (def haveFile : actualFiles) { - if (haveFile =~ wantFile) { - ok = true - break - } - } - if (! ok) { - missingList << wantFile - } - } - - def extraList = [] - for (def haveFile : actualFiles) { - def ok = false - for (def wantFile : expectedFiles) { - if (haveFile =~ wantFile) { - ok = true - break - } - } - if (! 
ok) { - extraList << haveFile - } - } - - def commonFiles = actualFiles.intersect(expectedFiles) - Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}", - missingList.size() == 0 && extraList.size() == 0) - break - case 'hadoop_tools': - def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars") - Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr) - - def toolsPath = new File(toolsPathStr) - Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute()) - - Shell sh = new Shell() - def classPath = sh.exec("hadoop classpath").getOut().join("\n") - Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0) - - Assert.assertFalse("${testName} fail: The enire '${toolsPath}' path should not be included in the hadoop's classpath", - classPath.split(File.pathSeparator).any { - new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/ - } - ) - break - case 'api_examination': - def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd']) - def libdir = getEnv(arguments['libDir'], arguments['envcmd']) - - def dir = new File(basedir + "/" + libdir) - Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory()) - def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar") - def String[] jars = dir.list(new FilenameFilter() { - @Override - boolean accept(File d, String name) { - Matcher matcher = pattern.matcher(name) - return (matcher.matches() && !name.contains("test")) - } - }) - Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length) - def jar = dir.getAbsolutePath() + "/" + jars[0] - - def examinerJar = System.properties['odpi.test.hive.hcat.job.jar'] - def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile'] - Shell sh = new 
Shell() - def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr() - int rc = sh.getRet() - Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc) - if (results.size() > 0) { - System.out.println("Received report for jar " + arguments['jar'] + results.join("\n")) - } - break; - - - default: - break - } - } -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java deleted file mode 100644 index 3e56224b..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.exec.CommandLine; -import org.apache.commons.exec.DefaultExecuteResultHandler; -import org.apache.commons.exec.DefaultExecutor; -import org.apache.commons.exec.ExecuteException; -import org.apache.commons.exec.ExecuteWatchdog; -import org.apache.commons.exec.Executor; -import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.exec.environment.EnvironmentUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class HiveHelper { - - private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName()); - - public static Map execCommand(CommandLine commandline) { - return execCommand(commandline, null); - } - - public static Map execCommand(CommandLine commandline, - Map envVars) { - - System.out.println("Executing command:"); - System.out.println(commandline.toString()); - Map env = null; - Map entry = new HashMap(); - try { - env = EnvironmentUtils.getProcEnvironment(); - } catch (IOException e1) { - // TODO Auto-generated catch block - LOG.debug("Failed to get process environment: "+ e1.getMessage()); - e1.printStackTrace(); - } - if (envVars != null) { - for (String key : envVars.keySet()) { - env.put(key, envVars.get(key)); - } - } - - DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler(); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream); - ExecuteWatchdog watchdog = 
new ExecuteWatchdog(60*10000); - Executor executor = new DefaultExecutor(); - executor.setExitValue(1); - executor.setWatchdog(watchdog); - executor.setStreamHandler(streamHandler); - try { - executor.execute(commandline, env, resultHandler); - } catch (ExecuteException e) { - // TODO Auto-generated catch block - LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue())); - LOG.debug("outputStream: "+ outputStream.toString()); - entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); - entry.put("outputStream", outputStream.toString() + e.getMessage()); - e.printStackTrace(); - return entry; - } catch (IOException e) { - // TODO Auto-generated catch block - LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue())); - LOG.debug("outputStream: "+ outputStream.toString()); - entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); - entry.put("outputStream", outputStream.toString() + e.getMessage()); - e.printStackTrace(); - return entry; - } - - try { - resultHandler.waitFor(); - /*System.out.println("Command output: "+outputStream.toString());*/ - entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); - entry.put("outputStream", outputStream.toString()); - return entry; - } catch (InterruptedException e) { - // TODO Auto-generated catch block - /*System.out.println("Command output: "+outputStream.toString());*/ - LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue())); - LOG.debug("outputStream: "+ outputStream.toString()); - entry.put("exitValue", String.valueOf(resultHandler.getExitValue())); - entry.put("outputStream", outputStream.toString()); - e.printStackTrace(); - return entry; - } - } - - protected static String getProperty(String property, String description) { - String val = System.getProperty(property); - if (val == null) { - throw new RuntimeException("You must set the property " + property + " with " + - 
description); - } - LOG.debug(description + " is " + val); - return val; - } - - -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java deleted file mode 100644 index 7512dabf..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.Properties; - -public class JdbcConnector { - private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName()); - - protected static final String URL = "odpi.test.hive.jdbc.url"; - protected static final String USER = "odpi.test.hive.jdbc.user"; - protected static final String PASSWD = "odpi.test.hive.jdbc.password"; - protected static final String LOCATION = "odpi.test.hive.location"; - protected static final String METASTORE_URL = "odpi.test.hive.metastore.url"; - protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test"; - protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test"; - protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir"; - protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir"; - - protected static Connection conn; - - @BeforeClass - public static void connectToJdbc() throws SQLException { - // Assume they've put the URL for the JDBC driver in an environment variable. 
- String jdbcUrl = getProperty(URL, "the JDBC URL"); - String jdbcUser = getProperty(USER, "the JDBC user name"); - String jdbcPasswd = getProperty(PASSWD, "the JDBC password"); - - Properties props = new Properties(); - props.put("user", jdbcUser); - if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd); - conn = DriverManager.getConnection(jdbcUrl, props); - } - - @AfterClass - public static void closeJdbc() throws SQLException { - if (conn != null) conn.close(); - } - - protected static String getProperty(String property, String description) { - String val = System.getProperty(property); - if (val == null) { - throw new RuntimeException("You must set the property " + property + " with " + - description); - } - LOG.debug(description + " is " + val); - return val; - } - - protected static boolean testActive(String property, String description) { - String val = System.getProperty(property, "true"); - LOG.debug(description + " is " + val); - return Boolean.valueOf(val); - } - -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java deleted file mode 100644 index 578621aa..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java +++ /dev/null @@ -1,201 +0,0 @@ -package org.odpi.specs.runtime.hive; -import org.apache.commons.exec.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; -import java.io.FileNotFoundException; -import java.io.PrintWriter; -import java.util.Map; - -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -public class TestBeeline { - - public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName()); - - private static final String URL = "odpi.test.hive.jdbc.url"; - private static final String USER = "odpi.test.hive.jdbc.user"; - private static final String PASSWD = "odpi.test.hive.jdbc.password"; - - private static Map results; - private static String beelineUrl; - private static String beelineUser; - private static String beelinePasswd; - - //creating beeline base command with username and password as per inputs - private static CommandLine beelineBaseCommand = new CommandLine("beeline"); - - @BeforeClass - public static void initialSetup(){ - TestBeeline.beelineUrl = System.getProperty(URL); - TestBeeline.beelineUser = System.getProperty(USER); - TestBeeline.beelinePasswd =System.getProperty(PASSWD); - - if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") - { - beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd); - } - else if (beelineUser != null && beelineUser != "") - { - beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser); - } - else { - beelineBaseCommand.addArgument("-u").addArgument(beelineUrl); - } - LOG.info("URL is " + beelineUrl); - LOG.info("User is " + beelineUser); - LOG.info("Passwd is " + beelinePasswd); - LOG.info("Passwd is null " + (beelinePasswd == null)); - } - - @Test - public void checkBeeline() { - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand)); - String consoleMsg = 
results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - } - - @Test - public void checkBeelineConnect(){ - try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } - catch (FileNotFoundException e1) { - e1.printStackTrace(); - } - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false)); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") ); - } - - @Test - public void checkBeelineHelp(){ - results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help")); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline --help FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception")); - } - - @Test - public void checkBeelineQueryExecFromCmdLine(){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;")); - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); - }else{ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;")); - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;")); - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;")); - } - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline -e FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive")); - } - - @Test - public void checkBeelineQueryExecFromFile() throws FileNotFoundException{ - - try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); } - try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } - try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } - try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); } - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false)); - - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false)); - }else{ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false)); - } - - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false)); - - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline -f FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false)); - } - - @Test - public void checkBeelineInitFile() throws FileNotFoundException{ - - try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); } - try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); } - try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); } - try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); } - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false)); - - if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false)); - }else{ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false)); - } - - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false)); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline -i FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false)); - } - - @Test - public void checkBeelineHiveVar() throws FileNotFoundException{ - - try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); } - try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); } - try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); } - try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); } - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false)); - - if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false)); - }else{ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false)); - } - - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false)); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline --hivevar FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false)); - } - - @Test - public void checkBeelineFastConnect(){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false")); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip")); - } - - @Test - public void checkBeelineVerbose(){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true")); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - } - - @Test - public void checkBeelineShowHeader(){ - results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;")); - String consoleMsg = results.get("outputStream").toLowerCase(); - Assert.assertEquals("beeline --showHeader FAILED. 
\n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception")); - } - - @AfterClass - public static void cleanup() throws FileNotFoundException { - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false)); - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false)); - } -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java deleted file mode 100644 index 2b70909e..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java +++ /dev/null @@ -1,213 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import java.io.FileNotFoundException; -import java.io.PrintWriter; -import java.util.Map; - -import org.apache.commons.exec.CommandLine; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.AfterClass; -import org.junit.Assert; - -public class TestCLI { - - static Map results; - static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true"; - - @BeforeClass - public static void setup(){ - - results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive")); - Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue"))); - } - - @Test - public void help(){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H")); - //LOG.info(results.get("exitValue")); - Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue"))); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help")); - Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue"))); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U")); - Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue"))); - } - - @Test - public void sqlFromCmdLine(){ - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); - 
if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); - } - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - - @Test - public void sqlFromFiles() throws FileNotFoundException{ - try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); } - try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } - try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } - try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); } - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new 
CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); - } - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db)); - } - - @Test - public void silent() { - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:")); - } - - @Test - public void verbose(){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES")); - } - - @Test - public void initialization() throws 
FileNotFoundException{ - try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); } - try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); } - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue"))); - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive")); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive")); - } - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - - @Test - public void database(){ - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - 
if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue"))); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue"))); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("Failed to create table using --database argument.", 
0, Integer.parseInt(results.get("exitValue"))); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - - @Test - public void hiveConf(){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES")); - } - - @Test - public void variableSubsitution() throws FileNotFoundException{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - try(PrintWriter out = new 
PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); } - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false)); - Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); - - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false)); - Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - - @Test - public void hiveVar() throws FileNotFoundException{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db)); - if(!results.get("outputStream").contains("odpi_runtime_hive")){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - }else{ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); 
out.println("quit;"); } - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false)); - Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); - - try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); } - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false)); - Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue"))); - Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive")); - - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - } - - @AfterClass - public static void cleanup(){ - results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db)); - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false)); - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false)); - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false)); - results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false)); - results = HiveHelper.execCommand(new 
CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false)); - } - -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java deleted file mode 100644 index 0ea49ce8..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java +++ /dev/null @@ -1,158 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.commons.exec.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; -import org.apache.hive.hcatalog.data.schema.HCatSchema; -import org.apache.thrift.TException; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - - -public class TestHCatalog { - private static final String JOBJAR = "odpi.test.hive.hcat.job.jar"; - private static final String HCATCORE = "odpi.test.hive.hcat.core.jar"; - - private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName()); - - private static IMetaStoreClient client = null; - private static HiveConf conf; - private static HCatSchema 
inputSchema; - private static HCatSchema outputSchema; - - private Random rand; - - @BeforeClass - public static void connect() throws MetaException { - if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) { - String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR, - "Hive conf directory "); - String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR, - "Hadoop conf directory "); - conf = new HiveConf(); - String fileSep = System.getProperty("file.separator"); - conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml")); - conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml")); - conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml")); - conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml")); - conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml")); - client = new HiveMetaStoreClient(conf); - - } - } - - @Before - public void checkIfActive() { - Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")); - rand = new Random(); - } - - @Test - public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException, - InterruptedException, URISyntaxException { - // Create a table to write to - final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE); - SerDeInfo serde = new SerDeInfo("default_serde", - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); - FieldSchema schema = new FieldSchema("line", "string", ""); - inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(), - HCatFieldSchema.Type.STRING, schema.getComment()))); - StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null, - "org.apache.hadoop.mapred.TextInputFormat", - "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null, - new HashMap()); - Table table = new Table(inputTable, 
"default", "me", 0, 0, 0, sd, null, - new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); - client.createTable(table); - - final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE); - sd = new StorageDescriptor(Arrays.asList( - new FieldSchema("word", "string", ""), - new FieldSchema("count", "int", "")), - null, "org.apache.hadoop.mapred.TextInputFormat", - "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null, - new HashMap()); - table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null, - new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); - client.createTable(table); - outputSchema = new HCatSchema(Arrays.asList( - new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""), - new HCatFieldSchema("count", HCatFieldSchema.Type.INT, ""))); - - // LATER Could I use HCatWriter here and the reader to read it? - // Write some stuff into a file in the location of the table - table = client.getTable("default", inputTable); - String inputFile = table.getSd().getLocation() + "/input"; - Path inputPath = new Path(inputFile); - FileSystem fs = FileSystem.get(conf); - FSDataOutputStream out = fs.create(inputPath); - out.writeChars("Mary had a little lamb\n"); - out.writeChars("its fleece was white as snow\n"); - out.writeChars("and everywhere that Mary went\n"); - out.writeChars("the lamb was sure to go\n"); - out.close(); - - Map env = new HashMap<>(); - env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, "")); - Map results = HiveHelper.execCommand(new CommandLine("hive") - .addArgument("--service") - .addArgument("jar") - .addArgument(System.getProperty(JOBJAR)) - .addArgument(HCatalogMR.class.getName()) - .addArgument("-it") - .addArgument(inputTable) - .addArgument("-ot") - .addArgument(outputTable) - .addArgument("-is") - .addArgument(inputSchema.getSchemaAsTypeString()) - .addArgument("-os") - .addArgument(outputSchema.getSchemaAsTypeString()), env); - 
LOG.info(results.toString()); - Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue"))); - - client.dropTable("default", inputTable); - client.dropTable("default", outputTable); - } - -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java deleted file mode 100644 index 154fd9cd..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java +++ /dev/null @@ -1,545 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.Statement; -import java.sql.Types; - -public class TestJdbc extends JdbcConnector { - private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName()); - - /** - * Test simple non-statement related class. setSchema is tested elsewhere because there's work - * to do for that one. Similarly with getMetadata. - * @throws SQLException - */ - @Test - public void nonStatementCalls() throws SQLException { - conn.clearWarnings(); - - boolean isAutoCommit = conn.getAutoCommit(); - LOG.debug("Auto commit is " + isAutoCommit); - - String catalog = conn.getCatalog(); - LOG.debug("Catalog is " + catalog); - - String schema = conn.getSchema(); - LOG.debug("Schema is " + schema); - - int txnIsolation = conn.getTransactionIsolation(); - LOG.debug("Transaction Isolation is " + txnIsolation); - - SQLWarning warning = conn.getWarnings(); - while (warning != null) { - LOG.debug("Found a warning: " + warning.getMessage()); - warning = warning.getNextWarning(); - } - - boolean closed = conn.isClosed(); - LOG.debug("Is closed? " + closed); - - boolean readOnly = conn.isReadOnly(); - LOG.debug("Is read only?" + readOnly); - - // Hive doesn't support catalogs, so setting this to whatever should be fine. 
If we have - // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause - // issues, so we may need to make this value configurable or something. - conn.setCatalog("fred"); - } - - /** - * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call - * that on a valid table. Same with getFunctions. - * @throws SQLException - */ - @Test - public void databaseMetaDataCalls() throws SQLException { - DatabaseMetaData md = conn.getMetaData(); - - boolean boolrc = md.allTablesAreSelectable(); - LOG.debug("All tables are selectable? " + boolrc); - - String strrc = md.getCatalogSeparator(); - LOG.debug("Catalog separator " + strrc); - - strrc = md.getCatalogTerm(); - LOG.debug("Catalog term " + strrc); - - ResultSet rs = md.getCatalogs(); - while (rs.next()) { - strrc = rs.getString(1); - LOG.debug("Found catalog " + strrc); - } - - Connection c = md.getConnection(); - - int intrc = md.getDatabaseMajorVersion(); - LOG.debug("DB major version is " + intrc); - - intrc = md.getDatabaseMinorVersion(); - LOG.debug("DB minor version is " + intrc); - - strrc = md.getDatabaseProductName(); - LOG.debug("DB product name is " + strrc); - - strrc = md.getDatabaseProductVersion(); - LOG.debug("DB product version is " + strrc); - - intrc = md.getDefaultTransactionIsolation(); - LOG.debug("Default transaction isolation is " + intrc); - - intrc = md.getDriverMajorVersion(); - LOG.debug("Driver major version is " + intrc); - - intrc = md.getDriverMinorVersion(); - LOG.debug("Driver minor version is " + intrc); - - strrc = md.getDriverName(); - LOG.debug("Driver name is " + strrc); - - strrc = md.getDriverVersion(); - LOG.debug("Driver version is " + strrc); - - strrc = md.getExtraNameCharacters(); - LOG.debug("Extra name characters is " + strrc); - - strrc = md.getIdentifierQuoteString(); - LOG.debug("Identifier quote string is " + strrc); - - // In Hive 1.2 this always returns an empty RS - rs = md.getImportedKeys("a", "b", "d"); - 
- // In Hive 1.2 this always returns an empty RS - rs = md.getIndexInfo("a", "b", "d", true, true); - - intrc = md.getJDBCMajorVersion(); - LOG.debug("JDBC major version is " + intrc); - - intrc = md.getJDBCMinorVersion(); - LOG.debug("JDBC minor version is " + intrc); - - intrc = md.getMaxColumnNameLength(); - LOG.debug("Maximum column name length is " + intrc); - - strrc = md.getNumericFunctions(); - LOG.debug("Numeric functions are " + strrc); - - // In Hive 1.2 this always returns an empty RS - rs = md.getPrimaryKeys("a", "b", "d"); - - // In Hive 1.2 this always returns an empty RS - rs = md.getProcedureColumns("a", "b", "d", "e"); - - strrc = md.getProcedureTerm(); - LOG.debug("Procedures are called " + strrc); - - // In Hive 1.2 this always returns an empty RS - rs = md.getProcedures("a", "b", "d"); - - strrc = md.getSchemaTerm(); - LOG.debug("Schemas are called " + strrc); - - rs = md.getSchemas(); - while (rs.next()) { - strrc = rs.getString(1); - LOG.debug("Found schema " + strrc); - } - - strrc = md.getSearchStringEscape(); - LOG.debug("Search string escape is " + strrc); - - strrc = md.getStringFunctions(); - LOG.debug("String functions are " + strrc); - - strrc = md.getSystemFunctions(); - LOG.debug("System functions are " + strrc); - - rs = md.getTableTypes(); - while (rs.next()) { - strrc = rs.getString(1); - LOG.debug("Found table type " + strrc); - } - - strrc = md.getTimeDateFunctions(); - LOG.debug("Time/date functions are " + strrc); - - rs = md.getTypeInfo(); - while (rs.next()) { - strrc = rs.getString(1); - LOG.debug("Found type " + strrc); - } - - // In Hive 1.2 this always returns an empty RS - rs = md.getUDTs("a", "b", "d", null); - - boolrc = md.supportsAlterTableWithAddColumn(); - LOG.debug("Supports alter table with add column? " + boolrc); - - boolrc = md.supportsAlterTableWithDropColumn(); - LOG.debug("Supports alter table with drop column? " + boolrc); - - boolrc = md.supportsBatchUpdates(); - LOG.debug("Supports batch updates? 
" + boolrc); - - boolrc = md.supportsCatalogsInDataManipulation(); - LOG.debug("Supports catalogs in data manipulation? " + boolrc); - - boolrc = md.supportsCatalogsInIndexDefinitions(); - LOG.debug("Supports catalogs in index definition? " + boolrc); - - boolrc = md.supportsCatalogsInPrivilegeDefinitions(); - LOG.debug("Supports catalogs in privilege definition? " + boolrc); - - boolrc = md.supportsCatalogsInProcedureCalls(); - LOG.debug("Supports catalogs in procedure calls? " + boolrc); - - boolrc = md.supportsCatalogsInTableDefinitions(); - LOG.debug("Supports catalogs in table definition? " + boolrc); - - boolrc = md.supportsColumnAliasing(); - LOG.debug("Supports column aliasing? " + boolrc); - - boolrc = md.supportsFullOuterJoins(); - LOG.debug("Supports full outer joins? " + boolrc); - - boolrc = md.supportsGroupBy(); - LOG.debug("Supports group by? " + boolrc); - - boolrc = md.supportsLimitedOuterJoins(); - LOG.debug("Supports limited outer joins? " + boolrc); - - boolrc = md.supportsMultipleResultSets(); - LOG.debug("Supports limited outer joins? " + boolrc); - - boolrc = md.supportsNonNullableColumns(); - LOG.debug("Supports non-nullable columns? " + boolrc); - - boolrc = md.supportsOuterJoins(); - LOG.debug("Supports outer joins? " + boolrc); - - boolrc = md.supportsPositionedDelete(); - LOG.debug("Supports positioned delete? " + boolrc); - - boolrc = md.supportsPositionedUpdate(); - LOG.debug("Supports positioned update? " + boolrc); - - boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); - LOG.debug("Supports result set holdability? " + boolrc); - - boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT); - LOG.debug("Supports result set type? " + boolrc); - - boolrc = md.supportsSavepoints(); - LOG.debug("Supports savepoints? " + boolrc); - - boolrc = md.supportsSchemasInDataManipulation(); - LOG.debug("Supports schemas in data manipulation? 
" + boolrc); - - boolrc = md.supportsSchemasInIndexDefinitions(); - LOG.debug("Supports schemas in index definitions? " + boolrc); - - boolrc = md.supportsSchemasInPrivilegeDefinitions(); - LOG.debug("Supports schemas in privilege definitions? " + boolrc); - - boolrc = md.supportsSchemasInProcedureCalls(); - LOG.debug("Supports schemas in procedure calls? " + boolrc); - - boolrc = md.supportsSchemasInTableDefinitions(); - LOG.debug("Supports schemas in table definitions? " + boolrc); - - boolrc = md.supportsSelectForUpdate(); - LOG.debug("Supports select for update? " + boolrc); - - boolrc = md.supportsStoredProcedures(); - LOG.debug("Supports stored procedures? " + boolrc); - - boolrc = md.supportsTransactions(); - LOG.debug("Supports transactions? " + boolrc); - - boolrc = md.supportsUnion(); - LOG.debug("Supports union? " + boolrc); - - boolrc = md.supportsUnionAll(); - LOG.debug("Supports union all? " + boolrc); - - } - - @Test - public void setSchema() throws SQLException { - try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, - ResultSet.CONCUR_READ_ONLY)) { - - final String dbName = "odpi_jdbc_test_db"; - - final String tableName = "odpi_jdbc_test_table"; - stmt.execute("drop table if exists " + tableName); - - stmt.execute("drop database if exists " + dbName + " cascade"); - stmt.execute("create database " + dbName); - - conn.setSchema(dbName); - - DatabaseMetaData md = conn.getMetaData(); - - ResultSet rs = md.getSchemas(null, dbName); - - while (rs.next()) { - String schemaName = rs.getString(2); - LOG.debug("Schema name is " + schemaName); - } - - stmt.execute("create table " + tableName + " (i int, s varchar(32))"); - - rs = md.getTables(null, dbName, tableName, null); - while (rs.next()) { - String tName = rs.getString(3); - LOG.debug("Schema name is " + tName); - } - - rs = md.getColumns(null, dbName, tableName, "i"); - while (rs.next()) { - String colName = rs.getString(4); - LOG.debug("Schema name is " + colName); - } - - 
rs = md.getFunctions(null, dbName, "foo"); - while (rs.next()) { - String funcName = rs.getString(3); - LOG.debug("Schema name is " + funcName); - } - } - } - - @Test - public void statement() throws SQLException { - try (Statement stmt = conn.createStatement()) { - stmt.cancel(); - } - - try (Statement stmt = conn.createStatement()) { - stmt.clearWarnings(); - - final String tableName = "odpi_jdbc_statement_test_table"; - - stmt.execute("drop table if exists " + tableName); - stmt.execute("create table " + tableName + " (a int, b varchar(32))"); - - stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')"); - - int intrc = stmt.getUpdateCount(); - LOG.debug("Update count is " + intrc); - - ResultSet rs = stmt.executeQuery("select * from " + tableName); - while (rs.next()) { - LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2)); - } - - Connection localConn = stmt.getConnection(); - - intrc = stmt.getFetchDirection(); - LOG.debug("Fetch direction is " + intrc); - - intrc = stmt.getFetchSize(); - LOG.debug("Fetch size is " + intrc); - - intrc = stmt.getMaxRows(); - LOG.debug("max rows is " + intrc); - - boolean boolrc = stmt.getMoreResults(); - LOG.debug("more results is " + boolrc); - - intrc = stmt.getQueryTimeout(); - LOG.debug("query timeout is " + intrc); - - stmt.execute("select * from " + tableName); - rs = stmt.getResultSet(); - while (rs.next()) { - LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2)); - } - - intrc = stmt.getResultSetType(); - LOG.debug("result set type is " + intrc); - - SQLWarning warning = stmt.getWarnings(); - while (warning != null) { - LOG.debug("Found a warning: " + warning.getMessage()); - warning = warning.getNextWarning(); - } - - boolrc = stmt.isClosed(); - LOG.debug("is closed " + boolrc); - - boolrc = stmt.isCloseOnCompletion(); - LOG.debug("is close on completion " + boolrc); - - boolrc = stmt.isPoolable(); - LOG.debug("is poolable " + boolrc); - - 
stmt.setFetchDirection(ResultSet.FETCH_FORWARD); - stmt.setFetchSize(500); - stmt.setMaxRows(500); - } - } - - @Test - public void preparedStmtAndResultSet() throws SQLException { - final String tableName = "odpi_jdbc_psars_test_table"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists " + tableName); - stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " + - "i int, lo bigint, sh smallint, st varchar(32))"); - } - - // NOTE Hive 1.2 theoretically support binary, Date & Timestamp in JDBC, but I get errors when I - // try to put them in the query. - try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + - " values (?, ?, ?, ?, ?, ?, ?, ?)")) { - ps.setBoolean(1, true); - ps.setByte(2, (byte)1); - ps.setDouble(3, 3.141592654); - ps.setFloat(4, 3.14f); - ps.setInt(5, 3); - ps.setLong(6, 10L); - ps.setShort(7, (short)20); - ps.setString(8, "abc"); - ps.executeUpdate(); - } - - try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " + - "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { - ps.setNull(1, Types.INTEGER); - ps.setObject(2, "mary had a little lamb"); - ps.executeUpdate(); - ps.setNull(1, Types.INTEGER, null); - ps.setString(2, "its fleece was white as snow"); - ps.clearParameters(); - ps.setNull(1, Types.INTEGER, null); - ps.setString(2, "its fleece was white as snow"); - ps.execute(); - - } - - try (Statement stmt = conn.createStatement()) { - - ResultSet rs = stmt.executeQuery("select * from " + tableName); - - ResultSetMetaData md = rs.getMetaData(); - - int colCnt = md.getColumnCount(); - LOG.debug("Column count is " + colCnt); - - for (int i = 1; i <= colCnt; i++) { - LOG.debug("Looking at column " + i); - String strrc = md.getColumnClassName(i); - LOG.debug("Column class name is " + strrc); - - int intrc = md.getColumnDisplaySize(i); - LOG.debug("Column display size is " + intrc); - - 
strrc = md.getColumnLabel(i); - LOG.debug("Column label is " + strrc); - - strrc = md.getColumnName(i); - LOG.debug("Column name is " + strrc); - - intrc = md.getColumnType(i); - LOG.debug("Column type is " + intrc); - - strrc = md.getColumnTypeName(i); - LOG.debug("Column type name is " + strrc); - - intrc = md.getPrecision(i); - LOG.debug("Precision is " + intrc); - - intrc = md.getScale(i); - LOG.debug("Scale is " + intrc); - - boolean boolrc = md.isAutoIncrement(i); - LOG.debug("Is auto increment? " + boolrc); - - boolrc = md.isCaseSensitive(i); - LOG.debug("Is case sensitive? " + boolrc); - - boolrc = md.isCurrency(i); - LOG.debug("Is currency? " + boolrc); - - intrc = md.getScale(i); - LOG.debug("Scale is " + intrc); - - intrc = md.isNullable(i); - LOG.debug("Is nullable? " + intrc); - - boolrc = md.isReadOnly(i); - LOG.debug("Is read only? " + boolrc); - - } - - while (rs.next()) { - LOG.debug("bo = " + rs.getBoolean(1)); - LOG.debug("bo = " + rs.getBoolean("bo")); - LOG.debug("ti = " + rs.getByte(2)); - LOG.debug("ti = " + rs.getByte("ti")); - LOG.debug("db = " + rs.getDouble(3)); - LOG.debug("db = " + rs.getDouble("db")); - LOG.debug("fl = " + rs.getFloat(4)); - LOG.debug("fl = " + rs.getFloat("fl")); - LOG.debug("i = " + rs.getInt(5)); - LOG.debug("i = " + rs.getInt("i")); - LOG.debug("lo = " + rs.getLong(6)); - LOG.debug("lo = " + rs.getLong("lo")); - LOG.debug("sh = " + rs.getShort(7)); - LOG.debug("sh = " + rs.getShort("sh")); - LOG.debug("st = " + rs.getString(8)); - LOG.debug("st = " + rs.getString("st")); - LOG.debug("tm = " + rs.getObject(8)); - LOG.debug("tm = " + rs.getObject("st")); - LOG.debug("tm was null " + rs.wasNull()); - } - LOG.debug("bo is column " + rs.findColumn("bo")); - - int intrc = rs.getConcurrency(); - LOG.debug("concurrency " + intrc); - - intrc = rs.getFetchDirection(); - LOG.debug("fetch direction " + intrc); - - intrc = rs.getType(); - LOG.debug("type " + intrc); - - Statement copy = rs.getStatement(); - - SQLWarning warning 
= rs.getWarnings(); - while (warning != null) { - LOG.debug("Found a warning: " + warning.getMessage()); - warning = warning.getNextWarning(); - } - rs.clearWarnings(); - } - } -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java deleted file mode 100644 index f2478412..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java +++ /dev/null @@ -1,337 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; - -import java.sql.SQLException; -import java.sql.Statement; - -// This does not test every option that Hive supports, but does try to touch the major -// options, especially anything unique to Hive. See each test for areas tested and not tested. -public class TestSql extends JdbcConnector { - private static final Log LOG = LogFactory.getLog(TestSql.class.getName()); - - @Test - public void db() throws SQLException { - final String db1 = "odpi_sql_db1"; - final String db2 = "odpi_sql_db2"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop database if exists " + db1 + " cascade"); - - // Simple create database - stmt.execute("create database " + db1); - stmt.execute("drop database " + db1); - - stmt.execute("drop schema if exists " + db2 + " cascade"); - - String location = getProperty(LOCATION, "a writable directory in HDFS"); - - // All the bells and whistles - stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location + - "' with dbproperties ('a' = 'b')"); - - stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')"); - - stmt.execute("drop database " + db2 + " restrict"); - } - } - - @Test - public void table() throws SQLException { - final String table1 = "odpi_sql_table1"; - final String table2 = "odpi_sql_table2"; - final String table3 = "odpi_sql_table3"; - final String table4 = "odpi_sql_table4"; - final String table5 = "odpi_sql_table5"; - - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table 
if exists " + table1); - stmt.execute("drop table if exists " + table2); - stmt.execute("drop table if exists " + table3); - stmt.execute("drop table if exists " + table4); - stmt.execute("drop table if exists " + table5); - - String location = getProperty(LOCATION, "a writable directory in HDFS"); - stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" + - location + "'"); - - // With a little bit of everything, except partitions, we'll do those below - stmt.execute("create table if not exists " + table2 + - "(c1 tinyint," + - " c2 smallint," + - " c3 int comment 'a column comment'," + - " c4 bigint," + - " c5 float," + - " c6 double," + - " c7 decimal," + - " c8 decimal(12)," + - " c9 decimal(8,2)," + - " c10 timestamp," + - " c11 date," + - " c12 string," + - " c13 varchar(120)," + - " c14 char(10)," + - " c15 boolean," + - " c16 binary," + - " c17 array," + - " c18 map ," + - " c19 struct," + - " c20 uniontype) " + - "comment 'table comment'" + - "clustered by (c1) sorted by (c2) into 10 buckets " + - "stored as orc " + - "tblproperties ('a' = 'b')"); - - // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler - - stmt.execute("create temporary table " + table3 + " like " + table2); - - stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')"); - - stmt.execute("create table " + table4 + " as select a, b from " + table1); - - stmt.execute("truncate table " + table4); - - stmt.execute("alter table " + table4 + " rename to " + table5); - stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')"); - - // Not testing alter of clustered or sorted by, because that's suicidal - // Not testing alter of skewed or serde properties since we didn't test it for create - // above. 
- - stmt.execute("drop table " + table1 + " purge"); - stmt.execute("drop table " + table2); - stmt.execute("drop table " + table3); - stmt.execute("drop table " + table5); - } - } - - @Test - public void partitionedTable() throws SQLException { - final String table1 = "odpi_sql_ptable1"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists " + table1); - - stmt.execute("create table " + table1 + - "(c1 int," + - " c2 varchar(32))" + - "partitioned by (p1 string comment 'a partition column')" + - "stored as orc"); - - stmt.execute("alter table " + table1 + " add partition (p1 = 'a')"); - stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')"); - stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')"); - stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')"); - stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate"); - stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')"); - - stmt.execute("alter table " + table1 + " add columns (c3 float)"); - stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')"); - - // Not testing rename partition, exchange partition, msck repair, archive/unarchive, - // set location, enable/disable no_drop/offline, compact (because not everyone may have - // ACID on), change column - - stmt.execute("drop table " + table1); - - } - } - - @Test - public void view() throws SQLException { - final String table1 = "odpi_sql_vtable1"; - final String view1 = "odpi_sql_view1"; - final String view2 = "odpi_sql_view2"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists " + table1); - stmt.execute("drop view if exists " + view1); - stmt.execute("drop view if exists " + view2); - stmt.execute("create table " + table1 + "(a int, b varchar(32))"); - stmt.execute("create view " + view1 + " as select a from " + table1); - - stmt.execute("create view if not exists " 
+ view2 + - " comment 'a view comment' " + - "tblproperties ('a' = 'b') " + - "as select b from " + table1); - - stmt.execute("alter view " + view1 + " as select a, b from " + table1); - stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')"); - - stmt.execute("drop view " + view1); - stmt.execute("drop view " + view2); - } - } - - // Not testing indices because they are currently useless in Hive - // Not testing macros because as far as I know no one uses them - - @Test - public void function() throws SQLException { - final String func1 = "odpi_sql_func1"; - final String func2 = "odpi_sql_func2"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("create temporary function " + func1 + - " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'"); - stmt.execute("drop temporary function " + func1); - - stmt.execute("drop function if exists " + func2); - - stmt.execute("create function " + func2 + - " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'"); - stmt.execute("drop function " + func2); - } - } - - // Not testing grant/revoke/roles as different vendors use different security solutions - // and hence different things will work here. - - // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with - // values and select), and multi-insert. Load is not tested as there's no guarantee that the - // test machine has access to HDFS and thus the ability to upload a file. 
- @Test - public void insert() throws SQLException { - final String table1 = "odpi_insert_table1"; - final String table2 = "odpi_insert_table2"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists " + table1); - stmt.execute("create table " + table1 + - "(c1 tinyint," + - " c2 smallint," + - " c3 int," + - " c4 bigint," + - " c5 float," + - " c6 double," + - " c7 decimal(8,2)," + - " c8 varchar(120)," + - " c9 char(10)," + - " c10 boolean)" + - " partitioned by (p1 string)"); - - // insert with partition - stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " + - "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," + - "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)"); - - stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict"); - - // dynamic partition - stmt.execute("explain insert into " + table1 + " partition (p1) values " + - "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," + - "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')"); - - stmt.execute("drop table if exists " + table2); - - stmt.execute("create table " + table2 + - "(c1 tinyint," + - " c2 smallint," + - " c3 int," + - " c4 bigint," + - " c5 float," + - " c6 double," + - " c7 decimal(8,2)," + - " c8 varchar(120)," + - " c9 char(10)," + - " c10 boolean)"); - - stmt.execute("explain insert into " + table2 + " values " + - "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," + - "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)"); - - stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " + - "c7, c8, c9, c10 from " + table1); - - // multi-insert - stmt.execute("from " + table1 + - " insert into table " + table1 + " partition (p1 = 'c') " + - " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" + - " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10"); - } - } - - // This tests CTEs - @Test - public void cte() throws SQLException { - final String 
table1 = "odpi_cte_table1"; - try (Statement stmt = conn.createStatement()) { - stmt.execute("drop table if exists " + table1); - stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))"); - stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " + - " select c1 from cte1"); - } - } - - // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer), - // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union, - // subqueries, and over. - - @Test - public void select() throws SQLException { - final String[] tables = {"odpi_select_table1", "odpi_select_table2"}; - try (Statement stmt = conn.createStatement()) { - for (int i = 0; i < tables.length; i++) { - stmt.execute("drop table if exists " + tables[i]); - stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))"); - } - - // single table queries tested above in several places - - stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " + - "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " + - "group by a.c2 " + - "order by a.c2 asc " + - "limit 10"); - - stmt.execute("explain select distinct a.c2 " + - "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " + - "order by a.c2 desc "); - - stmt.execute("explain select a.c2, SUM(a.c1) " + - "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " + - "group by a.c2 " + - "having SUM(b.c1) > 0 " + - "order by a.c2 "); - - stmt.execute("explain select a.c2, rank() over (partition by a.c1) " + - "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) "); - - stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]); - - stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2"); - stmt.execute("explain select * from " + tables[0] + " cluster by c1"); - - stmt.execute("explain select * from (select c1 from " + 
tables[0] + ") t"); - stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] + - ")"); - - } - - } - - // Update and delete are not tested because not everyone configures their system to run - // with ACID. - - -} - - - - - diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java deleted file mode 100644 index 8e0abda4..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java +++ /dev/null @@ -1,251 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.odpi.specs.runtime.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.TableType; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.Partition; -import org.apache.hadoop.hive.metastore.api.SerDeInfo; -import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.thrift.TException; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Random; - -public class TestThrift { - - private static final Log LOG = LogFactory.getLog(TestThrift.class.getName()); - - private static IMetaStoreClient client = null; - private static HiveConf conf; - - private Random rand; - - @BeforeClass - public static void connect() throws MetaException { - if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) { - String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL"); - conf = new HiveConf(); - conf.setVar(HiveConf.ConfVars.METASTOREURIS, url); - LOG.info("Set to test against metastore at " + url); - client = new 
HiveMetaStoreClient(conf); - } - } - - @Before - public void checkIfActive() { - Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")); - rand = new Random(); - } - - @Test - public void db() throws TException { - final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE); - - Database db = new Database(dbName, "a db", null, new HashMap()); - client.createDatabase(db); - db = client.getDatabase(dbName); - Assert.assertNotNull(db); - db = new Database(db); - db.getParameters().put("a", "b"); - client.alterDatabase(dbName, db); - List alldbs = client.getDatabases("odpi_*"); - Assert.assertNotNull(alldbs); - Assert.assertTrue(alldbs.size() > 0); - alldbs = client.getAllDatabases(); - Assert.assertNotNull(alldbs); - Assert.assertTrue(alldbs.size() > 0); - client.dropDatabase(dbName, true, true); - } - - // Not testing types calls, as they aren't used AFAIK - - @Test - public void nonPartitionedTable() throws TException { - final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE); - - // I don't test every operation related to tables, but only those that are frequently used. - SerDeInfo serde = new SerDeInfo("default_serde", - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); - FieldSchema fs = new FieldSchema("a", "int", "no comment"); - StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null, - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null, - new HashMap()); - Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null, - new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); - client.createTable(table); - - table = client.getTable("default", tableName); - Assert.assertNotNull(table); - - List

tables = - client.getTableObjectsByName("default", Collections.singletonList(tableName)); - Assert.assertNotNull(tables); - Assert.assertEquals(1, tables.size()); - - List tableNames = client.getTables("default", "odpi_*"); - Assert.assertNotNull(tableNames); - Assert.assertTrue(tableNames.size() >= 1); - - tableNames = client.getAllTables("default"); - Assert.assertNotNull(tableNames); - Assert.assertTrue(tableNames.size() >= 1); - - List cols = client.getFields("default", tableName); - Assert.assertNotNull(cols); - Assert.assertEquals(1, cols.size()); - - cols = client.getSchema("default", tableName); - Assert.assertNotNull(cols); - Assert.assertEquals(1, cols.size()); - - table = new Table(table); - table.getParameters().put("a", "b"); - client.alter_table("default", tableName, table, false); - - table.getParameters().put("c", "d"); - client.alter_table("default", tableName, table); - - client.dropTable("default", tableName, true, false); - } - - @Test - public void partitionedTable() throws TException { - final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE); - - // I don't test every operation related to tables, but only those that are frequently used. 
- SerDeInfo serde = new SerDeInfo("default_serde", - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap()); - FieldSchema fs = new FieldSchema("a", "int", "no comment"); - StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null, - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null, - new HashMap()); - FieldSchema pk = new FieldSchema("pk", "string", ""); - Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk), - new HashMap(), null, null, TableType.MANAGED_TABLE.toString()); - client.createTable(table); - - sd = new StorageDescriptor(Collections.singletonList(fs), null, - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, - new HashMap()); - Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0, - 0, sd, new HashMap()); - client.add_partition(partition); - - List partitions = new ArrayList<>(2); - sd = new StorageDescriptor(Collections.singletonList(fs), null, - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, - new HashMap()); - partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0, - 0, sd, new HashMap())); - sd = new StorageDescriptor(Collections.singletonList(fs), null, - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), - conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null, - new HashMap()); - partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0, - 0, sd, new HashMap())); - client.add_partitions(partitions); - - List parts = client.listPartitions("default", tableName, (short)-1); - Assert.assertNotNull(parts); - Assert.assertEquals(3, parts.size()); - - parts = client.listPartitions("default", tableName, 
Collections.singletonList("x"), - (short)-1); - Assert.assertNotNull(parts); - Assert.assertEquals(1, parts.size()); - - parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me", - Collections.emptyList()); - Assert.assertNotNull(parts); - Assert.assertEquals(3, parts.size()); - - List partNames = client.listPartitionNames("default", tableName, (short)-1); - Assert.assertNotNull(partNames); - Assert.assertEquals(3, partNames.size()); - - parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1); - Assert.assertNotNull(parts); - Assert.assertEquals(1, parts.size()); - - parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x")); - Assert.assertNotNull(parts); - Assert.assertEquals(1, parts.size()); - - partition = client.getPartition("default", tableName, Collections.singletonList("x")); - Assert.assertNotNull(partition); - - partition = client.getPartition("default", tableName, "pk=x"); - Assert.assertNotNull(partition); - - partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"), - "me", Collections.emptyList()); - Assert.assertNotNull(partition); - - partition = new Partition(partition); - partition.getParameters().put("a", "b"); - client.alter_partition("default", tableName, partition); - - for (Partition p : parts) p.getParameters().put("c", "d"); - client.alter_partitions("default", tableName, parts); - - // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence - // from the parser. The odds that anyone other than Hive parser would call this method seem - // low, since you'd have to exactly match the serliazation of the Hive parser. 
- - // Not testing partition marking events, not used by anyone but Hive replication AFAIK - - client.dropPartition("default", tableName, "pk=x", true); - client.dropPartition("default", tableName, Collections.singletonList("y"), true); - } - - // Not testing index calls, as no one uses indices - - - // Not sure if anyone uses stats calls or not. Other query engines might. Ignoring for now. - - // Not sure if anyone else uses functions, though I'm guessing not as without Hive classes they - // won't be runable. - - // Not testing authorization calls as AFAIK no one else uses Hive security - - // Not testing transaction/locking calls, as those are used only by Hive. - - // Not testing notification logging calls, as those are used only by Hive replication. - -} diff --git a/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py b/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py deleted file mode 100755 index 091c496e..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/python - -''' -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-''' - -import os -import re -import warnings -from optparse import OptionParser - -def main(): - parser = OptionParser() - parser.add_option("-d", "--directory", help="Top level directory of source tree") - parser.add_option("-r", "--report", help="API compatibility report file, in HTML format") - - (options, args) = parser.parse_args() - - # Get the ATS endpoint if it's not given. - if options.directory == None: - print "You must specify a top level directory of the source tree" - return 1 - - if options.report == None: - print "You must specify the report to check against" - return 1 - - publicClasses = set() - for directory in os.walk(options.directory): - for afile in directory[2]: - if re.search("\.java$", afile) != None: - handle = open(os.path.join(directory[0], afile)) - # Figure out the package we're in - pre = re.search("org/apache/hadoop[\w/]*", directory[0]) - if pre == None: - warnings.warn("No package for " + directory[0]) - continue - package = pre.group(0) - expecting = 0 - for line in handle: - if re.search("@InterfaceAudience.Public", line) != None: - expecting = 1 - classname = re.search("class (\w*)", line) - if classname != None and expecting == 1: - publicClasses.add(package + "/" + classname.group(1)) - expecting = 0 - handle.close() - - handle = open(options.report) - haveChecked = set() - for line in handle: - classre = re.search("mangled: (org/apache/hadoop[\w/]+)", line) - if classre != None: - classname = classre.group(1) - if classname not in haveChecked: - if classname in publicClasses: - print "Warning, found change in public class " + classname - haveChecked.add(classname) - handle.close() - - - - -main() - - diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json deleted file mode 100644 index 6a6c7af7..00000000 --- 
a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-common","version":"2.7.3","classes":{"org.apache.hadoop.record.RecordInput":{"name":"org.apache.hadoop.record.RecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws 
java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.NullWritable":{"name":"org.apache.hadoop.io.NullWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.NullWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.NullWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.NullWritable get()":{"name":"get","returnType":"org.apache.hadoop.io.NullWritable","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.XmlRecordInput":{"name":"org.apache.hadoop.record.XmlRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws 
java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileSystem":{"name":"org.apache.hadoop.fs.FileSystem","methods":{"org.apache.hadoop.security.token.Token getDelegationToken(java.lang.String) throws java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws 
java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.ContentSummary getContentSummary(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getContentSummary","returnType":"org.apache.hadoop.fs.ContentSummary","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.Class getFileSystemClass(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystemClass","returnType":"java.lang.Class","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.Map getStatistics()":{"name":"getStatistics","returnType":"java.util.Map","args":[],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FileSystem$Statistics getStatistics(java.lang.String, java.lang.Class)":{"name":"getStatistics","returnType":"org.apache.hadoop.fs.FileSystem$Statistics","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"boolean isFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listFiles(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listFiles","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean areSymlinksEnabled()":{"name":"areSymlinksEnabled","returnType":"boolean","args":[],"exceptions":[]},"boolean 
createNewFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createNewFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"boolean deleteOnExit(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"deleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration) throws 
java.io.IOException":{"name":"getLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void printStatistics() throws java.io.IOException":{"name":"printStatistics","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void moveFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream 
create(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus() throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":[],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"[Lorg.apache.hadoop.security.token.Token; addDelegationTokens(java.lang.String, org.apache.hadoop.security.Credentials) throws java.io.IOException":{"name":"addDelegationTokens","returnType":"[Lorg.apache.hadoop.security.token.Token;","args":["java.lang.String","org.apache.hadoop.security.Credentials"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean 
rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void enableSymlinks()":{"name":"enableSymlinks","returnType":"void","args":[],"exceptions":[]},"void moveToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws 
java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean delete(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isDirectory(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isDirectory","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"boolean 
cancelDeleteOnExit(org.apache.hadoop.fs.Path)":{"name":"cancelDeleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getNamed(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getNamed","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, 
java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, 
long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void moveFromLocalFile([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.net.URI getDefaultUri(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultUri","returnType":"java.net.URI","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, 
org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, 
java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.List getAllStatistics()":{"name":"getAllStatistics","returnType":"java.util.List","args":[],"exceptions":[]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void 
removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void closeAllForUGI(org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException":{"name":"closeAllForUGI","returnType":"void","args":["org.apache.hadoop.security.UserGroupInformation"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.net.URI)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void clearStatistics()":{"name":"clearStatistics","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void 
removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"long getLength(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getCanonicalServiceName()":{"name":"getCanonicalServiceName","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockSize(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getReplication(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, 
java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.LocalFileSystem newInstanceLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstanceLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"boolean exists(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"exists","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void closeAll() throws 
java.io.IOException":{"name":"closeAll","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.BlockLocation":{"name":"org.apache.hadoop.fs.BlockLocation","methods":{"[Ljava.lang.String; getCachedHosts()":{"name":"getCachedHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setTopologyPaths([Ljava.lang.String;) throws java.io.IOException":{"name":"setTopologyPaths","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setHosts([Ljava.lang.String;) throws java.io.IOException":{"name":"setHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setCorrupt(boolean)":{"name":"setCorrupt","returnType":"void","args":["boolean"],"exceptions":[]},"[Ljava.lang.String; getNames() throws 
java.io.IOException":{"name":"getNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getTopologyPaths() throws java.io.IOException":{"name":"getTopologyPaths","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"long getOffset()":{"name":"getOffset","returnType":"long","args":[],"exceptions":[]},"void setOffset(long)":{"name":"setOffset","returnType":"void","args":["long"],"exceptions":[]},"void setNames([Ljava.lang.String;) throws java.io.IOException":{"name":"setNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setLength(long)":{"name":"setLength","returnType":"void","args":["long"],"exceptions":[]},"[Ljava.lang.String; getHosts() throws java.io.IOException":{"name":"getHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"boolean isCorrupt()":{"name":"isCorrupt","returnType":"boolean","args":[],"exceptions":[]},"void setCachedHosts([Ljava.lang.String;)":{"name":"setCachedHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]}}},"org.apache.hadoop.io.Text":{"name":"org.apache.hadoop.io.Text","methods":{"java.lang.String readString(java.io.DataInput, int) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(org.apache.hadoop.io.Text)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Text"],"exceptions":[]},"void validateUTF8([B, int, int) throws 
java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B","int","int"],"exceptions":["java.nio.charset.MalformedInputException"]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void readFields(java.io.DataInput, int) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"int bytesToCodePoint(java.nio.ByteBuffer)":{"name":"bytesToCodePoint","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String decode([B, int, int, boolean) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String, boolean) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String, int) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String","int"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B, int, int) throws 
java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void write(java.io.DataOutput, int) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"void set(java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int utf8Length(java.lang.String)":{"name":"utf8Length","returnType":"int","args":["java.lang.String"],"exceptions":[]},"void readWithKnownLength(java.io.DataInput, int) throws java.io.IOException":{"name":"readWithKnownLength","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.io.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B"],"exceptions":["java.nio.charset.CharacterCodingException"]},"void skip(java.io.DataInput) throws java.io.IOException":{"name":"skip","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int 
charAt(int)":{"name":"charAt","returnType":"int","args":["int"],"exceptions":[]},"int find(java.lang.String, int)":{"name":"find","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"int find(java.lang.String)":{"name":"find","returnType":"int","args":["java.lang.String"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]},"void validateUTF8([B) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B"],"exceptions":["java.nio.charset.MalformedInputException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Writable":{"name":"org.apache.hadoop.io.Writable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VLongWritable":{"name":"org.apache.hadoop.io.VLongWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","returnType":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VLongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VLongWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VersionedWritable":{"name":"org.apache.hadoop.io.VersionedWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte getVersion()":{"name":"getVersion","returnType":"byte","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.SequenceFile":{"name":"org.apache.hadoop.io.SequenceFile","methods":{"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileContext, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, 
org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, [Lorg.apache.hadoop.fs.Options$CreateOpts;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileContext","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata","java.util.EnumSet","[Lorg.apache.hadoop.fs.Options$CreateOpts;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, [Lorg.apache.hadoop.io.SequenceFile$Writer$Option;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","[Lorg.apache.hadoop.io.SequenceFile$Writer$Option;"],"exceptions":["java.io.IOException"]},"void setDefaultCompressionType(org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setDefaultCompressionType","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws 
java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, 
org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","boolean","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer 
createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, 
org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists","methods":{}},"org.apache.hadoop.fs.FileStatus":{"name":"org.apache.hadoop.fs.FileStatus","methods":{"org.apache.hadoop.fs.permission.FsPermission getPermission()":{"name":"getPermission","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"boolean isFile()":{"name":"isFile","returnType":"boolean","args":[],"exceptions":[]},"long getBlockSize()":{"name":"getBlockSize","returnType":"long","args":[],"exceptions":[]},"java.lang.String getOwner()":{"name":"getOwner","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setSymlink(org.apache.hadoop.fs.Path)":{"name":"setSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"long getAccessTime()":{"name":"getAccessTime","returnType":"long","args":[],"exceptions":[]},"boolean isDir()":{"name":"isDir","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isEncrypted()":{"name":"isEncrypted","returnType":"boolean","args":[],"exceptions":[]},"long 
getLen()":{"name":"getLen","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setPath(org.apache.hadoop.fs.Path)":{"name":"setPath","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getSymlink() throws java.io.IOException":{"name":"getSymlink","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"short getReplication()":{"name":"getReplication","returnType":"short","args":[],"exceptions":[]},"boolean isDirectory()":{"name":"isDirectory","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getGroup()":{"name":"getGroup","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean isSymlink()":{"name":"isSymlink","returnType":"boolean","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getModificationTime()":{"name":"getModificationTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.PureJavaCrc32":{"name":"org.apache.hadoop.util.PureJavaCrc32","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.fs.Trash":{"name":"org.apache.hadoop.fs.Trash","methods":{"java.lang.Runnable getEmptier() throws 
java.io.IOException":{"name":"getEmptier","returnType":"java.lang.Runnable","args":[],"exceptions":["java.io.IOException"]},"boolean moveToTrash(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToTrash","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void expunge() throws java.io.IOException":{"name":"expunge","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean moveToAppropriateTrash(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"moveToAppropriateTrash","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void checkpoint() throws java.io.IOException":{"name":"checkpoint","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean isEnabled()":{"name":"isEnabled","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.record.RecordComparator":{"name":"org.apache.hadoop.record.RecordComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.record.RecordComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.record.RecordComparator"],"exceptions":[]}}},"org.apache.hadoop.record.meta.RecordTypeInfo":{"name":"org.apache.hadoop.record.meta.RecordTypeInfo","methods":{"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Collection getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"void 
serialize(org.apache.hadoop.record.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"void addField(java.lang.String, org.apache.hadoop.record.meta.TypeID)":{"name":"addField","returnType":"void","args":["java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":[]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"org.apache.hadoop.record.meta.RecordTypeInfo getNestedStructTypeInfo(java.lang.String)":{"name":"getNestedStructTypeInfo","returnType":"org.apache.hadoop.record.meta.RecordTypeInfo","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.conf.Configuration":{"name":"org.apache.hadoop.conf.Configuration","methods":{"void addResource(org.apache.hadoop.fs.Path)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"java.util.Set getFinalParameters()":{"name":"getFinalParameters","returnType":"java.util.Set","args":[],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String, java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setPattern(java.lang.String, java.util.regex.Pattern)":{"name":"setPattern","returnType":"void","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void 
addResource(org.apache.hadoop.conf.Configuration)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.List getInstances(java.lang.String, java.lang.Class)":{"name":"getInstances","returnType":"java.util.List","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void addResource(java.net.URL)":{"name":"addResource","returnType":"void","args":["java.net.URL"],"exceptions":[]},"void setFloat(java.lang.String, float)":{"name":"setFloat","returnType":"void","args":["java.lang.String","float"],"exceptions":[]},"void set(java.lang.String, java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void setBooleanIfUnset(java.lang.String, boolean)":{"name":"setBooleanIfUnset","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"void reloadConfiguration()":{"name":"reloadConfiguration","returnType":"void","args":[],"exceptions":[]},"java.util.regex.Pattern getPattern(java.lang.String, java.util.regex.Pattern)":{"name":"getPattern","returnType":"java.util.regex.Pattern","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"java.lang.String get(java.lang.String, java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setDeprecatedProperties()":{"name":"setDeprecatedProperties","returnType":"void","args":[],"exceptions":[]},"boolean onlyKeyExists(java.lang.String)":{"name":"onlyKeyExists","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator 
iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.Class getClassByName(java.lang.String) throws java.lang.ClassNotFoundException":{"name":"getClassByName","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":["java.lang.ClassNotFoundException"]},"java.io.InputStream getConfResourceAsInputStream(java.lang.String)":{"name":"getConfResourceAsInputStream","returnType":"java.io.InputStream","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void writeXml(java.io.Writer) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.Writer"],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"java.net.URL getResource(java.lang.String)":{"name":"getResource","returnType":"java.net.URL","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.lang.String, java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"boolean getBoolean(java.lang.String, boolean)":{"name":"getBoolean","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.Enum getEnum(java.lang.String, 
java.lang.Enum)":{"name":"getEnum","returnType":"java.lang.Enum","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void set(java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setEnum(java.lang.String, java.lang.Enum)":{"name":"setEnum","returnType":"void","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"[Ljava.lang.Class; getClasses(java.lang.String, [Ljava.lang.Class;)":{"name":"getClasses","returnType":"[Ljava.lang.Class;","args":["java.lang.String","[Ljava.lang.Class;"],"exceptions":[]},"float getFloat(java.lang.String, float)":{"name":"getFloat","returnType":"float","args":["java.lang.String","float"],"exceptions":[]},"long getLongBytes(java.lang.String, long)":{"name":"getLongBytes","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"java.lang.Class getClassByNameOrNull(java.lang.String)":{"name":"getClassByNameOrNull","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":[]},"void setStrings(java.lang.String, [Ljava.lang.String;)":{"name":"setStrings","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void addDeprecations([Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;)":{"name":"addDeprecations","returnType":"void","args":["[Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;"],"exceptions":[]},"[Ljava.lang.String; getPropertySources(java.lang.String)":{"name":"getPropertySources","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getRange(java.lang.String, 
java.lang.String)":{"name":"getRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setLong(java.lang.String, long)":{"name":"setLong","returnType":"void","args":["java.lang.String","long"],"exceptions":[]},"void setQuietMode(boolean)":{"name":"setQuietMode","returnType":"void","args":["boolean"],"exceptions":[]},"void setClassLoader(java.lang.ClassLoader)":{"name":"setClassLoader","returnType":"void","args":["java.lang.ClassLoader"],"exceptions":[]},"[C getPassword(java.lang.String) throws java.io.IOException":{"name":"getPassword","returnType":"[C","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"setTimeDuration","returnType":"void","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void setDouble(java.lang.String, double)":{"name":"setDouble","returnType":"void","args":["java.lang.String","double"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;","java.lang.String"],"exceptions":[]},"java.lang.String get(java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void setClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"setClass","returnType":"void","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"java.util.Collection getStringCollection(java.lang.String)":{"name":"getStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"java.lang.String 
toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.io.File getFile(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getFile","returnType":"java.io.File","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"double getDouble(java.lang.String, double)":{"name":"getDouble","returnType":"double","args":["java.lang.String","double"],"exceptions":[]},"void setBoolean(java.lang.String, boolean)":{"name":"setBoolean","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isDeprecated(java.lang.String)":{"name":"isDeprecated","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void setInt(java.lang.String, int)":{"name":"setInt","returnType":"void","args":["java.lang.String","int"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"long getLong(java.lang.String, long)":{"name":"getLong","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setAllowNullValueProperties(boolean)":{"name":"setAllowNullValueProperties","returnType":"void","args":["boolean"],"exceptions":[]},"java.util.Collection 
getTrimmedStringCollection(java.lang.String)":{"name":"getTrimmedStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.io.Reader getConfResourceAsReader(java.lang.String)":{"name":"getConfResourceAsReader","returnType":"java.io.Reader","args":["java.lang.String"],"exceptions":[]},"long getTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"getTimeDuration","returnType":"long","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void addResource(java.io.InputStream)":{"name":"addResource","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","int"],"exceptions":[]},"void dumpDeprecatedKeys()":{"name":"dumpDeprecatedKeys","returnType":"void","args":[],"exceptions":[]},"[I getInts(java.lang.String)":{"name":"getInts","returnType":"[I","args":["java.lang.String"],"exceptions":[]},"void addResource(java.lang.String)":{"name":"addResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"void setIfUnset(java.lang.String, 
java.lang.String)":{"name":"setIfUnset","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void unset(java.lang.String)":{"name":"unset","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void dumpConfiguration(org.apache.hadoop.conf.Configuration, java.io.Writer) throws java.io.IOException":{"name":"dumpConfiguration","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.Writer"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getStrings(java.lang.String)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void addResource(java.io.InputStream, java.lang.String)":{"name":"addResource","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"java.util.Map getValByRegex(java.lang.String)":{"name":"getValByRegex","returnType":"java.util.Map","args":["java.lang.String"],"exceptions":[]},"void setSocketAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"setSocketAddr","returnType":"void","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"int getInt(java.lang.String, int)":{"name":"getInt","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void writeXml(java.io.OutputStream) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.OutputStream"],"exceptions":["java.io.IOException"]},"java.lang.ClassLoader getClassLoader()":{"name":"getClassLoader","returnType":"java.lang.ClassLoader","args":[],"exceptions":[]},"void addDefaultResource(java.lang.String)":{"name":"addDefaultResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","int"],"exceptions":[]},"boolean 
hasWarnedDeprecation(java.lang.String)":{"name":"hasWarnedDeprecation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRaw(java.lang.String)":{"name":"getRaw","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableFactories":{"name":"org.apache.hadoop.io.WritableFactories","methods":{"org.apache.hadoop.io.WritableFactory getFactory(java.lang.Class)":{"name":"getFactory","returnType":"org.apache.hadoop.io.WritableFactory","args":["java.lang.Class"],"exceptions":[]},"void setFactory(java.lang.Class, org.apache.hadoop.io.WritableFactory)":{"name":"setFactory","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableFactory"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.io.SetFile":{"name":"org.apache.hadoop.io.SetFile","methods":{}},"org.apache.hadoop.record.compiler.JString":{"name":"org.apache.hadoop.record.compiler.JString","methods":{}},"org.apache.hadoop.record.compiler.JBoolean":{"name":"org.apache.hadoop.record.compiler.JBoolean","methods":{}},"org.apache.hadoop.io.ShortWritable":{"name":"org.apache.hadoop.io.ShortWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.io.ShortWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ShortWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"short get()":{"name":"get","returnType":"short","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(short)":{"name":"set","returnType":"void","args":["short"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.InvalidPathException":{"name":"org.apache.hadoop.fs.InvalidPathException","methods":{}},"org.apache.hadoop.record.compiler.JVector":{"name":"org.apache.hadoop.record.compiler.JVector","methods":{}},"org.apache.hadoop.io.ArrayWritable":{"name":"org.apache.hadoop.io.ArrayWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void set([Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"[Ljava.lang.String; toStrings()":{"name":"toStrings","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.Class getValueClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object 
toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.IntWritable":{"name":"org.apache.hadoop.io.IntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.IntWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.IntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.TwoDArrayWritable":{"name":"org.apache.hadoop.io.TwoDArrayWritable","methods":{"[[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set([[Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.Object 
toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataInputStream":{"name":"org.apache.hadoop.fs.FSDataInputStream","methods":{"void readFully(long, [B) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B"],"exceptions":["java.io.IOException"]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void readFully(long, [B, int, int) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void unbuffer()":{"name":"unbuffer","returnType":"void","args":[],"exceptions":[]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void setReadahead(java.lang.Long) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setReadahead","returnType":"void","args":["java.lang.Long"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void releaseBuffer(java.nio.ByteBuffer)":{"name":"releaseBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.io.InputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.InputStream","args":[],"exceptions":[]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws 
java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int","java.util.EnumSet"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"int read(long, [B, int, int) throws java.io.IOException":{"name":"read","returnType":"int","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void setDropBehind(java.lang.Boolean) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setDropBehind","returnType":"void","args":["java.lang.Boolean"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"int read(java.nio.ByteBuffer) throws java.io.IOException":{"name":"read","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":["java.io.IOException"]},"java.io.FileDescriptor getFileDescriptor() throws java.io.IOException":{"name":"getFileDescriptor","returnType":"java.io.FileDescriptor","args":[],"exceptions":["java.io.IOException"]},"boolean seekToNewSource(long) throws java.io.IOException":{"name":"seekToNewSource","returnType":"boolean","args":["long"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JFloat":{"name":"org.apache.hadoop.record.compiler.JFloat","methods":{}},"org.apache.hadoop.record.compiler.generated.RccConstants":{"name":"org.apache.hadoop.record.compiler.generated.RccConstants","methods":{}},"org.apache.hadoop.io.ArrayPrimitiveWritable":{"name":"org.apache.hadoop.io.ArrayPrimitiveWritable","methods":{"boolean isDeclaredComponentType(java.lang.Class)":{"name":"isDeclaredComponentType","returnType":"boolean","args":["java.lang.Class"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Class 
getComponentType()":{"name":"getComponentType","returnType":"java.lang.Class","args":[],"exceptions":[]},"void set(java.lang.Object)":{"name":"set","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.lang.Class getDeclaredComponentType()":{"name":"getDeclaredComponentType","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object get()":{"name":"get","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileChecksum":{"name":"org.apache.hadoop.fs.FileChecksum","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String getAlgorithmName()":{"name":"getAlgorithmName","returnType":"java.lang.String","args":[],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.Options$ChecksumOpt getChecksumOpt()":{"name":"getChecksumOpt","returnType":"org.apache.hadoop.fs.Options$ChecksumOpt","args":[],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.BinaryRecordOutput":{"name":"org.apache.hadoop.record.BinaryRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws 
java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.BinaryRecordOutput get(java.io.DataOutput)":{"name":"get","returnType":"org.apache.hadoop.record.BinaryRecordOutput","args":["java.io.DataOutput"],"exceptions":[]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, 
java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FsConstants":{"name":"org.apache.hadoop.fs.FsConstants","methods":{}},"org.apache.hadoop.conf.ReconfigurationTaskStatus":{"name":"org.apache.hadoop.conf.ReconfigurationTaskStatus","methods":{"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"boolean stopped()":{"name":"stopped","returnType":"boolean","args":[],"exceptions":[]},"boolean hasTask()":{"name":"hasTask","returnType":"boolean","args":[],"exceptions":[]},"java.util.Map getStatus()":{"name":"getStatus","returnType":"java.util.Map","args":[],"exceptions":[]},"long getEndTime()":{"name":"getEndTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.record.compiler.Consts":{"name":"org.apache.hadoop.record.compiler.Consts","methods":{}},"org.apache.hadoop.fs.ftp.FTPFileSystem":{"name":"org.apache.hadoop.fs.ftp.FTPFileSystem","methods":{"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.generated.Rcc":{"name":"org.apache.hadoop.record.compiler.generated.Rcc","methods":{"void ReInit(java.io.InputStream)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"org.apache.hadoop.record.compiler.JFile Input() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Input","returnType":"org.apache.hadoop.record.compiler.JFile","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"java.util.ArrayList Module() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Module","returnType":"java.util.ArrayList","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JVector Vector() throws 
org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Vector","returnType":"org.apache.hadoop.record.compiler.JVector","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JFile Include() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Include","returnType":"org.apache.hadoop.record.compiler.JFile","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"java.lang.String ModuleName() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"ModuleName","returnType":"java.lang.String","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void main([Ljava.lang.String;)":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getToken(int)":{"name":"getToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":["int"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getNextToken()":{"name":"getNextToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.ParseException generateParseException()":{"name":"generateParseException","returnType":"org.apache.hadoop.record.compiler.generated.ParseException","args":[],"exceptions":[]},"java.util.ArrayList RecordList() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"RecordList","returnType":"java.util.ArrayList","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void disable_tracing()":{"name":"disable_tracing","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.JType Type() throws 
org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Type","returnType":"org.apache.hadoop.record.compiler.JType","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JField Field() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Field","returnType":"org.apache.hadoop.record.compiler.JField","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"org.apache.hadoop.record.compiler.JMap Map() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Map","returnType":"org.apache.hadoop.record.compiler.JMap","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"int driver([Ljava.lang.String;)":{"name":"driver","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":[]},"void ReInit(org.apache.hadoop.record.compiler.generated.RccTokenManager)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.RccTokenManager"],"exceptions":[]},"void enable_tracing()":{"name":"enable_tracing","returnType":"void","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"void ReInit(java.io.Reader)":{"name":"ReInit","returnType":"void","args":["java.io.Reader"],"exceptions":[]},"org.apache.hadoop.record.compiler.JRecord Record() throws org.apache.hadoop.record.compiler.generated.ParseException":{"name":"Record","returnType":"org.apache.hadoop.record.compiler.JRecord","args":[],"exceptions":["org.apache.hadoop.record.compiler.generated.ParseException"]},"void usage()":{"name":"usage","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.fs.FsStatus":{"name":"org.apache.hadoop.fs.FsStatus","methods":{"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getCapacity()":{"name":"getCapacity","returnType":"long","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getRemaining()":{"name":"getRemaining","returnType":"long","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.BooleanWritable":{"name":"org.apache.hadoop.io.BooleanWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(boolean)":{"name":"set","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"boolean get()":{"name":"get","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.BooleanWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.BooleanWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ChecksumFileSystem":{"name":"org.apache.hadoop.fs.ChecksumFileSystem","methods":{"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws 
java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"long getChecksumLength(long, int)":{"name":"getChecksumLength","returnType":"long","args":["long","int"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean reportChecksumFailure(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FSDataInputStream, long, org.apache.hadoop.fs.FSDataInputStream, long)":{"name":"reportChecksumFailure","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FSDataInputStream","long","org.apache.hadoop.fs.FSDataInputStream","long"],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean 
rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"int getBytesPerSum()":{"name":"getBytesPerSum","returnType":"int","args":[],"exceptions":[]},"long getChecksumFileLength(org.apache.hadoop.fs.Path, long)":{"name":"getChecksumFileLength","returnType":"long","args":["org.apache.hadoop.fs.Path","long"],"exceptions":[]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getRawFileSystem()":{"name":"getRawFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"double getApproxChkSumLength(long)":{"name":"getApproxChkSumLength","returnType":"double","args":["long"],"exceptions":[]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isChecksumFile(org.apache.hadoop.fs.Path)":{"name":"isChecksumFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path 
getChecksumFile(org.apache.hadoop.fs.Path)":{"name":"getChecksumFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws 
java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.bloom.BloomFilter":{"name":"org.apache.hadoop.util.bloom.BloomFilter","methods":{"int getVectorSize()":{"name":"getVectorSize","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]}}},"org.apache.hadoop.fs.ftp.FTPException":{"name":"org.apache.hadoop.fs.ftp.FTPException","methods":{}},"org.apache.hadoop.record.XmlRecordOutput":{"name":"org.apache.hadoop.record.XmlRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws 
java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void 
startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.Buffer":{"name":"org.apache.hadoop.record.Buffer","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"[B get()":{"name":"get","returnType":"[B","args":[],"exceptions":[]},"int getCapacity()":{"name":"getCapacity","returnType":"int","args":[],"exceptions":[]},"void truncate()":{"name":"truncate","returnType":"void","args":[],"exceptions":[]},"void append([B)":{"name":"append","returnType":"void","args":["[B"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"void setCapacity(int)":{"name":"setCapacity","returnType":"void","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String toString(java.lang.String) throws java.io.UnsupportedEncodingException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.UnsupportedEncodingException"]},"java.lang.Object clone() throws 
java.lang.CloneNotSupportedException":{"name":"clone","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.CloneNotSupportedException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"int getCount()":{"name":"getCount","returnType":"int","args":[],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"void copy([B, int, int)":{"name":"copy","returnType":"void","args":["[B","int","int"],"exceptions":[]}}},"org.apache.hadoop.io.ElasticByteBufferPool":{"name":"org.apache.hadoop.io.ElasticByteBufferPool","methods":{"void putBuffer(java.nio.ByteBuffer)":{"name":"putBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.nio.ByteBuffer getBuffer(boolean, int)":{"name":"getBuffer","returnType":"java.nio.ByteBuffer","args":["boolean","int"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JInt":{"name":"org.apache.hadoop.record.compiler.JInt","methods":{}},"org.apache.hadoop.io.WritableComparable":{"name":"org.apache.hadoop.io.WritableComparable","methods":{}},"org.apache.hadoop.service.ServiceStateChangeListener":{"name":"org.apache.hadoop.service.ServiceStateChangeListener","methods":{"void stateChanged(org.apache.hadoop.service.Service)":{"name":"stateChanged","returnType":"void","args":["org.apache.hadoop.service.Service"],"exceptions":[]}}},"org.apache.hadoop.metrics2.util.MBeans":{"name":"org.apache.hadoop.metrics2.util.MBeans","methods":{"void unregister(javax.management.ObjectName)":{"name":"unregister","returnType":"void","args":["javax.management.ObjectName"],"exceptions":[]},"javax.management.ObjectName register(java.lang.String, java.lang.String, 
java.lang.Object)":{"name":"register","returnType":"javax.management.ObjectName","args":["java.lang.String","java.lang.String","java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JFile":{"name":"org.apache.hadoop.record.compiler.JFile","methods":{"int genCode(java.lang.String, java.lang.String, java.util.ArrayList) throws java.io.IOException":{"name":"genCode","returnType":"int","args":["java.lang.String","java.lang.String","java.util.ArrayList"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableComparator":{"name":"org.apache.hadoop.io.WritableComparator","methods":{"long readLong([B, int)":{"name":"readLong","returnType":"long","args":["[B","int"],"exceptions":[]},"int readUnsignedShort([B, int)":{"name":"readUnsignedShort","returnType":"int","args":["[B","int"],"exceptions":[]},"int compare(java.lang.Object, java.lang.Object)":{"name":"compare","returnType":"int","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"int hashBytes([B, int)":{"name":"hashBytes","returnType":"int","args":["[B","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.io.WritableComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableComparator"],"exceptions":[]},"int compare(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.WritableComparable)":{"name":"compare","returnType":"int","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"int compareBytes([B, int, int, [B, int, int)":{"name":"compareBytes","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.io.WritableComparator get(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"get","returnType":"org.apache.hadoop.io.WritableComparator","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"float readFloat([B, int)":{"name":"readFloat","returnType":"float","args":["[B","int"],"exceptions":[]},"int hashBytes([B, int, int)":{"name":"hashBytes","returnType":"int","args":["[B","int","int"],"exceptions":[]},"long readVLong([B, int) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["[B","int"],"exceptions":["java.io.IOException"]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"int readInt([B, int)":{"name":"readInt","returnType":"int","args":["[B","int"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparator get(java.lang.Class)":{"name":"get","returnType":"org.apache.hadoop.io.WritableComparator","args":["java.lang.Class"],"exceptions":[]},"int readVInt([B, int) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["[B","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable newKey()":{"name":"newKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"double readDouble([B, int)":{"name":"readDouble","returnType":"double","args":["[B","int"],"exceptions":[]}}},"org.apache.hadoop.io.Stringifier":{"name":"org.apache.hadoop.io.Stringifier","methods":{"java.lang.Object fromString(java.lang.String) throws java.io.IOException":{"name":"fromString","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String toString(java.lang.Object) throws 
java.io.IOException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.permission.FsAction":{"name":"org.apache.hadoop.fs.permission.FsAction","methods":{"org.apache.hadoop.fs.permission.FsAction not()":{"name":"not","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getFsAction(java.lang.String)":{"name":"getFsAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction and(org.apache.hadoop.fs.permission.FsAction)":{"name":"and","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]},"[Lorg.apache.hadoop.fs.permission.FsAction; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.permission.FsAction;","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["java.lang.String"],"exceptions":[]},"boolean implies(org.apache.hadoop.fs.permission.FsAction)":{"name":"implies","returnType":"boolean","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction or(org.apache.hadoop.fs.permission.FsAction)":{"name":"or","returnType":"org.apache.hadoop.fs.permission.FsAction","args":["org.apache.hadoop.fs.permission.FsAction"],"exceptions":[]}}},"org.apache.hadoop.io.ObjectWritable":{"name":"org.apache.hadoop.io.ObjectWritable","methods":{"void writeObject(java.io.DataOutput, java.lang.Object, java.lang.Class, org.apache.hadoop.conf.Configuration, boolean) throws 
java.io.IOException":{"name":"writeObject","returnType":"void","args":["java.io.DataOutput","java.lang.Object","java.lang.Class","org.apache.hadoop.conf.Configuration","boolean"],"exceptions":["java.io.IOException"]},"java.lang.Object readObject(java.io.DataInput, org.apache.hadoop.io.ObjectWritable, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"readObject","returnType":"java.lang.Object","args":["java.io.DataInput","org.apache.hadoop.io.ObjectWritable","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void set(java.lang.Object)":{"name":"set","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.lang.Class loadClass(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"loadClass","returnType":"java.lang.Class","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"java.lang.Object readObject(java.io.DataInput, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"readObject","returnType":"java.lang.Object","args":["java.io.DataInput","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void writeObject(java.io.DataOutput, java.lang.Object, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"writeObject","returnType":"void","args":["java.io.DataOutput","java.lang.Object","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Class 
getDeclaredClass()":{"name":"getDeclaredClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Object get()":{"name":"get","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataOutputStream":{"name":"org.apache.hadoop.fs.FSDataOutputStream","methods":{"void hflush() throws java.io.IOException":{"name":"hflush","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void sync() throws java.io.IOException":{"name":"sync","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void hsync() throws java.io.IOException":{"name":"hsync","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setDropBehind(java.lang.Boolean) throws java.io.IOException":{"name":"setDropBehind","returnType":"void","args":["java.lang.Boolean"],"exceptions":["java.io.IOException"]},"java.io.OutputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.OutputStream","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.XAttrCodec":{"name":"org.apache.hadoop.fs.XAttrCodec","methods":{"[Lorg.apache.hadoop.fs.XAttrCodec; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.XAttrCodec;","args":[],"exceptions":[]},"java.lang.String encodeValue([B, org.apache.hadoop.fs.XAttrCodec) throws 
java.io.IOException":{"name":"encodeValue","returnType":"java.lang.String","args":["[B","org.apache.hadoop.fs.XAttrCodec"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.XAttrCodec valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.XAttrCodec","args":["java.lang.String"],"exceptions":[]},"[B decodeValue(java.lang.String) throws java.io.IOException":{"name":"decodeValue","returnType":"[B","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JLong":{"name":"org.apache.hadoop.record.compiler.JLong","methods":{}},"org.apache.hadoop.fs.FilterFileSystem":{"name":"org.apache.hadoop.fs.FilterFileSystem","methods":{"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws 
java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, 
java.lang.String) throws java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, 
boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws 
java.io.IOException":{"name":"removeAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws 
java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path 
getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getRawFileSystem()":{"name":"getRawFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"modifyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws 
java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSError":{"name":"org.apache.hadoop.fs.FSError","methods":{}},"org.apache.hadoop.record.compiler.JRecord":{"name":"org.apache.hadoop.record.compiler.JRecord","methods":{}},"org.apache.hadoop.util.PureJavaCrc32C":{"name":"org.apache.hadoop.util.PureJavaCrc32C","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.io.BloomMapFile":{"name":"org.apache.hadoop.io.BloomMapFile","methods":{"void delete(org.apache.hadoop.fs.FileSystem, java.lang.String) throws java.io.IOException":{"name":"delete","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.StructTypeID":{"name":"org.apache.hadoop.record.meta.StructTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.util.Collection 
getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.record.meta.VectorTypeID":{"name":"org.apache.hadoop.record.meta.VectorTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getElementTypeID()":{"name":"getElementTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.fs.XAttrSetFlag":{"name":"org.apache.hadoop.fs.XAttrSetFlag","methods":{"[Lorg.apache.hadoop.fs.XAttrSetFlag; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.XAttrSetFlag;","args":[],"exceptions":[]},"org.apache.hadoop.fs.XAttrSetFlag valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.XAttrSetFlag","args":["java.lang.String"],"exceptions":[]},"void validate(java.lang.String, boolean, java.util.EnumSet) throws java.io.IOException":{"name":"validate","returnType":"void","args":["java.lang.String","boolean","java.util.EnumSet"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.StringInterner":{"name":"org.apache.hadoop.util.StringInterner","methods":{"java.lang.String strongIntern(java.lang.String)":{"name":"strongIntern","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"java.lang.String 
weakIntern(java.lang.String)":{"name":"weakIntern","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.io.file.tfile.MetaBlockDoesNotExist":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockDoesNotExist","methods":{}},"org.apache.hadoop.fs.FileAlreadyExistsException":{"name":"org.apache.hadoop.fs.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.io.AbstractMapWritable":{"name":"org.apache.hadoop.io.AbstractMapWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.RawLocalFileSystem":{"name":"org.apache.hadoop.fs.RawLocalFileSystem","methods":{"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.io.File pathToFile(org.apache.hadoop.fs.Path)":{"name":"pathToFile","returnType":"java.io.File","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void moveFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTimes","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void completeLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long, 
org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void useStatIfAvailable()":{"name":"useStatIfAvailable","returnType":"void","args":[],"exceptions":[]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, 
org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.RccTokenManager":{"name":"org.apache.hadoop.record.compiler.generated.RccTokenManager","methods":{"void SwitchTo(int)":{"name":"SwitchTo","returnType":"void","args":["int"],"exceptions":[]},"void 
ReInit(org.apache.hadoop.record.compiler.generated.SimpleCharStream)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.SimpleCharStream"],"exceptions":[]},"void setDebugStream(java.io.PrintStream)":{"name":"setDebugStream","returnType":"void","args":["java.io.PrintStream"],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token getNextToken()":{"name":"getNextToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":[],"exceptions":[]},"void ReInit(org.apache.hadoop.record.compiler.generated.SimpleCharStream, int)":{"name":"ReInit","returnType":"void","args":["org.apache.hadoop.record.compiler.generated.SimpleCharStream","int"],"exceptions":[]}}},"org.apache.hadoop.record.Utils":{"name":"org.apache.hadoop.record.Utils","methods":{"int readVInt(java.io.DataInput) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeVInt(java.io.DataOutput, int) throws java.io.IOException":{"name":"writeVInt","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"long readVLong(java.io.DataInput) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int compareBytes([B, int, int, [B, int, int)":{"name":"compareBytes","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"float readFloat([B, int)":{"name":"readFloat","returnType":"float","args":["[B","int"],"exceptions":[]},"long readVLong([B, int) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["[B","int"],"exceptions":["java.io.IOException"]},"int readVInt([B, int) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["[B","int"],"exceptions":["java.io.IOException"]},"int getVIntSize(long)":{"name":"getVIntSize","returnType":"int","args":["long"],"exceptions":[]},"void 
writeVLong(java.io.DataOutput, long) throws java.io.IOException":{"name":"writeVLong","returnType":"void","args":["java.io.DataOutput","long"],"exceptions":["java.io.IOException"]},"double readDouble([B, int)":{"name":"readDouble","returnType":"double","args":["[B","int"],"exceptions":[]}}},"org.apache.hadoop.record.meta.FieldTypeInfo":{"name":"org.apache.hadoop.record.meta.FieldTypeInfo","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String getFieldID()":{"name":"getFieldID","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean equals(org.apache.hadoop.record.meta.FieldTypeInfo)":{"name":"equals","returnType":"boolean","args":["org.apache.hadoop.record.meta.FieldTypeInfo"],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getTypeID()":{"name":"getTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]}}},"org.apache.hadoop.io.MultipleIOException":{"name":"org.apache.hadoop.io.MultipleIOException","methods":{"java.util.List getExceptions()":{"name":"getExceptions","returnType":"java.util.List","args":[],"exceptions":[]},"java.io.IOException createIOException(java.util.List)":{"name":"createIOException","returnType":"java.io.IOException","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.fs.UnsupportedFileSystemException":{"name":"org.apache.hadoop.fs.UnsupportedFileSystemException","methods":{}},"org.apache.hadoop.record.BinaryRecordInput":{"name":"org.apache.hadoop.record.BinaryRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws 
java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.BinaryRecordInput get(java.io.DataInput)":{"name":"get","returnType":"org.apache.hadoop.record.BinaryRecordInput","args":["java.io.DataInput"],"exceptions":[]},"boolean readBool(java.lang.String) throws 
java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.TokenMgrError":{"name":"org.apache.hadoop.record.compiler.generated.TokenMgrError","methods":{"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.io.FloatWritable":{"name":"org.apache.hadoop.io.FloatWritable","methods":{"float get()":{"name":"get","returnType":"float","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(float)":{"name":"set","returnType":"void","args":["float"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.FloatWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.FloatWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ChecksumException":{"name":"org.apache.hadoop.fs.ChecksumException","methods":{"long getPos()":{"name":"getPos","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.Progressable":{"name":"org.apache.hadoop.util.Progressable","methods":{"void progress()":{"name":"progress","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.util.bloom.DynamicBloomFilter":{"name":"org.apache.hadoop.util.bloom.DynamicBloomFilter","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.HashFunction":{"name":"org.apache.hadoop.util.bloom.HashFunction","methods":{"[I 
hash(org.apache.hadoop.util.bloom.Key)":{"name":"hash","returnType":"[I","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.tracing.SpanReceiverInfoBuilder":{"name":"org.apache.hadoop.tracing.SpanReceiverInfoBuilder","methods":{"org.apache.hadoop.tracing.SpanReceiverInfo build()":{"name":"build","returnType":"org.apache.hadoop.tracing.SpanReceiverInfo","args":[],"exceptions":[]},"void addConfigurationPair(java.lang.String, java.lang.String)":{"name":"addConfigurationPair","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.io.LongWritable":{"name":"org.apache.hadoop.io.LongWritable","methods":{"int compareTo(org.apache.hadoop.io.LongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.LongWritable"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","returnType":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.permission.FsPermission":{"name":"org.apache.hadoop.fs.permission.FsPermission","methods":{"org.apache.hadoop.fs.permission.FsAction 
getUserAction()":{"name":"getUserAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["java.lang.String"],"exceptions":[]},"void fromShort(short)":{"name":"fromShort","returnType":"void","args":["short"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission createImmutable(short)":{"name":"createImmutable","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["short"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"short toShort()":{"name":"toShort","returnType":"short","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getGroupAction()":{"name":"getGroupAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"boolean getEncryptedBit()":{"name":"getEncryptedBit","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsAction getOtherAction()":{"name":"getOtherAction","returnType":"org.apache.hadoop.fs.permission.FsAction","args":[],"exceptions":[]},"void setUMask(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.permission.FsPermission)":{"name":"setUMask","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission 
getFileDefault()":{"name":"getFileDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getDirDefault()":{"name":"getDirDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getCachePoolDefault()":{"name":"getCachePoolDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission applyUMask(org.apache.hadoop.fs.permission.FsPermission)":{"name":"applyUMask","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["org.apache.hadoop.fs.permission.FsPermission"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"short toExtendedShort()":{"name":"toExtendedShort","returnType":"short","args":[],"exceptions":[]},"boolean getStickyBit()":{"name":"getStickyBit","returnType":"boolean","args":[],"exceptions":[]},"boolean getAclBit()":{"name":"getAclBit","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getDefault()":{"name":"getDefault","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"org.apache.hadoop.fs.permission.FsPermission getUMask(org.apache.hadoop.conf.Configuration)":{"name":"getUMask","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.DefaultStringifier":{"name":"org.apache.hadoop.io.DefaultStringifier","methods":{"java.lang.Object load(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.Class) throws java.io.IOException":{"name":"load","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.Class"],"exceptions":["java.io.IOException"]},"void store(org.apache.hadoop.conf.Configuration, java.lang.Object, java.lang.String) throws java.io.IOException":{"name":"store","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.Object","java.lang.String"],"exceptions":["java.io.IOException"]},"void storeArray(org.apache.hadoop.conf.Configuration, [Ljava.lang.Object;, java.lang.String) throws java.io.IOException":{"name":"storeArray","returnType":"void","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.Object;","java.lang.String"],"exceptions":["java.io.IOException"]},"[Ljava.lang.Object; loadArray(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.Class) throws java.io.IOException":{"name":"loadArray","returnType":"[Ljava.lang.Object;","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.Class"],"exceptions":["java.io.IOException"]},"java.lang.Object fromString(java.lang.String) throws java.io.IOException":{"name":"fromString","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String toString(java.lang.Object) throws java.io.IOException":{"name":"toString","returnType":"java.lang.String","args":["java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.conf.Configured":{"name":"org.apache.hadoop.conf.Configured","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.fs.Path":{"name":"org.apache.hadoop.fs.Path","methods":{"boolean isAbsolute()":{"name":"isAbsolute","returnType":"boolean","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getParent()":{"name":"getParent","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path makeQualified(java.net.URI, org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["java.net.URI","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getPathWithoutSchemeAndAuthority(org.apache.hadoop.fs.Path)":{"name":"getPathWithoutSchemeAndAuthority","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isRoot()":{"name":"isRoot","returnType":"boolean","args":[],"exceptions":[]},"boolean isWindowsAbsolutePath(java.lang.String, boolean)":{"name":"isWindowsAbsolutePath","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isUriPathAbsolute()":{"name":"isUriPathAbsolute","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path 
suffix(java.lang.String)":{"name":"suffix","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem getFileSystem(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystem","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.net.URI toUri()":{"name":"toUri","returnType":"java.net.URI","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path mergePaths(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path)":{"name":"mergePaths","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.FileSystem)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.FileSystem"],"exceptions":[]},"boolean isAbsoluteAndSchemeAuthorityNull()":{"name":"isAbsoluteAndSchemeAuthorityNull","returnType":"boolean","args":[],"exceptions":[]},"int depth()":{"name":"depth","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.io.GenericWritable":{"name":"org.apache.hadoop.io.GenericWritable","methods":{"void set(org.apache.hadoop.io.Writable)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration 
getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable get()":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.conf.Configurable":{"name":"org.apache.hadoop.conf.Configurable","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.io.MapFile":{"name":"org.apache.hadoop.io.MapFile","methods":{"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"long fix(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.conf.Configuration) throws java.lang.Exception":{"name":"fix","returnType":"long","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.conf.Configuration"],"exceptions":["java.lang.Exception"]},"void delete(org.apache.hadoop.fs.FileSystem, java.lang.String) throws java.io.IOException":{"name":"delete","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String"],"exceptions":["java.io.IOException"]},"void rename(org.apache.hadoop.fs.FileSystem, java.lang.String, java.lang.String) throws 
java.io.IOException":{"name":"rename","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.Utils":{"name":"org.apache.hadoop.record.meta.Utils","methods":{"void skip(org.apache.hadoop.record.RecordInput, java.lang.String, org.apache.hadoop.record.meta.TypeID) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ReadOption":{"name":"org.apache.hadoop.fs.ReadOption","methods":{"[Lorg.apache.hadoop.fs.ReadOption; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.fs.ReadOption;","args":[],"exceptions":[]},"org.apache.hadoop.fs.ReadOption valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.fs.ReadOption","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.fs.AvroFSInput":{"name":"org.apache.hadoop.fs.AvroFSInput","methods":{"long tell() throws java.io.IOException":{"name":"tell","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"int read([B, int, int) throws java.io.IOException":{"name":"read","returnType":"int","args":["[B","int","int"],"exceptions":["java.io.IOException"]},"long length()":{"name":"length","returnType":"long","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.meta.TypeID":{"name":"org.apache.hadoop.record.meta.TypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"byte getTypeVal()":{"name":"getTypeVal","returnType":"byte","args":[],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.CountingBloomFilter":{"name":"org.apache.hadoop.util.bloom.CountingBloomFilter","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void not()":{"name":"not","returnType":"void","args":[],"exceptions":[]},"boolean membershipTest(org.apache.hadoop.util.bloom.Key)":{"name":"membershipTest","returnType":"boolean","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void and(org.apache.hadoop.util.bloom.Filter)":{"name":"and","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void delete(org.apache.hadoop.util.bloom.Key)":{"name":"delete","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void or(org.apache.hadoop.util.bloom.Filter)":{"name":"or","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"void xor(org.apache.hadoop.util.bloom.Filter)":{"name":"xor","returnType":"void","args":["org.apache.hadoop.util.bloom.Filter"],"exceptions":[]},"int approximateCount(org.apache.hadoop.util.bloom.Key)":{"name":"approximateCount","returnType":"int","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]}}},"org.apache.hadoop.util.bloom.RetouchedBloomFilter":{"name":"org.apache.hadoop.util.bloom.RetouchedBloomFilter","methods":{"void selectiveClearing(org.apache.hadoop.util.bloom.Key, 
short)":{"name":"selectiveClearing","returnType":"void","args":["org.apache.hadoop.util.bloom.Key","short"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void addFalsePositive([Lorg.apache.hadoop.util.bloom.Key;)":{"name":"addFalsePositive","returnType":"void","args":["[Lorg.apache.hadoop.util.bloom.Key;"],"exceptions":[]},"void add(org.apache.hadoop.util.bloom.Key)":{"name":"add","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void addFalsePositive(java.util.Collection)":{"name":"addFalsePositive","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"void addFalsePositive(org.apache.hadoop.util.bloom.Key)":{"name":"addFalsePositive","returnType":"void","args":["org.apache.hadoop.util.bloom.Key"],"exceptions":[]},"void addFalsePositive(java.util.List)":{"name":"addFalsePositive","returnType":"void","args":["java.util.List"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.generated.ParseException":{"name":"org.apache.hadoop.record.compiler.generated.ParseException","methods":{"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.record.compiler.generated.Token":{"name":"org.apache.hadoop.record.compiler.generated.Token","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.record.compiler.generated.Token 
newToken(int)":{"name":"newToken","returnType":"org.apache.hadoop.record.compiler.generated.Token","args":["int"],"exceptions":[]}}},"org.apache.hadoop.record.compiler.JDouble":{"name":"org.apache.hadoop.record.compiler.JDouble","methods":{}},"org.apache.hadoop.io.SortedMapWritable":{"name":"org.apache.hadoop.io.SortedMapWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.Object remove(java.lang.Object)":{"name":"remove","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.util.SortedMap headMap(java.lang.Object)":{"name":"headMap","returnType":"java.util.SortedMap","args":["java.lang.Object"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable firstKey()":{"name":"firstKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.util.SortedMap tailMap(org.apache.hadoop.io.WritableComparable)":{"name":"tailMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"java.util.SortedMap subMap(java.lang.Object, java.lang.Object)":{"name":"subMap","returnType":"java.util.SortedMap","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"java.util.SortedMap subMap(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.WritableComparable)":{"name":"subMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"org.apache.hadoop.io.Writable remove(java.lang.Object)":{"name":"remove","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Collection values()":{"name":"values","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.util.Comparator comparator()":{"name":"comparator","returnType":"java.util.Comparator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable put(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable)":{"name":"put","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"boolean isEmpty()":{"name":"isEmpty","returnType":"boolean","args":[],"exceptions":[]},"java.lang.Object lastKey()":{"name":"lastKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.util.Set entrySet()":{"name":"entrySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(java.lang.Object)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Set keySet()":{"name":"keySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable lastKey()":{"name":"lastKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.util.SortedMap tailMap(java.lang.Object)":{"name":"tailMap","returnType":"java.util.SortedMap","args":["java.lang.Object"],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"boolean containsKey(java.lang.Object)":{"name":"containsKey","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object get(java.lang.Object)":{"name":"get","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.util.SortedMap 
headMap(org.apache.hadoop.io.WritableComparable)":{"name":"headMap","returnType":"java.util.SortedMap","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":[]},"boolean containsValue(java.lang.Object)":{"name":"containsValue","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void putAll(java.util.Map)":{"name":"putAll","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.lang.Object put(java.lang.Object, java.lang.Object)":{"name":"put","returnType":"java.lang.Object","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"java.lang.Object firstKey()":{"name":"firstKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JType":{"name":"org.apache.hadoop.record.compiler.JType","methods":{}},"org.apache.hadoop.util.Tool":{"name":"org.apache.hadoop.util.Tool","methods":{"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.record.compiler.JField":{"name":"org.apache.hadoop.record.compiler.JField","methods":{}},"org.apache.hadoop.record.compiler.CodeBuffer":{"name":"org.apache.hadoop.record.compiler.CodeBuffer","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.record.meta.MapTypeID":{"name":"org.apache.hadoop.record.meta.MapTypeID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID getValueTypeID()":{"name":"getValueTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"org.apache.hadoop.record.meta.TypeID 
getKeyTypeID()":{"name":"getKeyTypeID","returnType":"org.apache.hadoop.record.meta.TypeID","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.log.metrics.EventCounter":{"name":"org.apache.hadoop.log.metrics.EventCounter","methods":{"void append(org.apache.log4j.spi.LoggingEvent)":{"name":"append","returnType":"void","args":["org.apache.log4j.spi.LoggingEvent"],"exceptions":[]},"long getFatal()":{"name":"getFatal","returnType":"long","args":[],"exceptions":[]},"long getWarn()":{"name":"getWarn","returnType":"long","args":[],"exceptions":[]},"long getError()":{"name":"getError","returnType":"long","args":[],"exceptions":[]},"long getInfo()":{"name":"getInfo","returnType":"long","args":[],"exceptions":[]},"void close()":{"name":"close","returnType":"void","args":[],"exceptions":[]},"boolean requiresLayout()":{"name":"requiresLayout","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.fs.permission.AccessControlException":{"name":"org.apache.hadoop.fs.permission.AccessControlException","methods":{}},"org.apache.hadoop.record.compiler.JByte":{"name":"org.apache.hadoop.record.compiler.JByte","methods":{}},"org.apache.hadoop.io.ArrayFile":{"name":"org.apache.hadoop.io.ArrayFile","methods":{}},"org.apache.hadoop.record.compiler.generated.SimpleCharStream":{"name":"org.apache.hadoop.record.compiler.generated.SimpleCharStream","methods":{"void ReInit(java.io.InputStream)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"void ReInit(java.io.InputStream, int, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","int","int","int"],"exceptions":[]},"void ReInit(java.io.InputStream, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","int","int"],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String, int, int) throws 
java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String","int","int"],"exceptions":["java.io.UnsupportedEncodingException"]},"java.lang.String GetImage()":{"name":"GetImage","returnType":"java.lang.String","args":[],"exceptions":[]},"void Done()":{"name":"Done","returnType":"void","args":[],"exceptions":[]},"void adjustBeginLineColumn(int, int)":{"name":"adjustBeginLineColumn","returnType":"void","args":["int","int"],"exceptions":[]},"int getEndColumn()":{"name":"getEndColumn","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String) throws java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":["java.io.UnsupportedEncodingException"]},"void ReInit(java.io.Reader, int, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.Reader","int","int","int"],"exceptions":[]},"[C GetSuffix(int)":{"name":"GetSuffix","returnType":"[C","args":["int"],"exceptions":[]},"int getBeginLine()":{"name":"getBeginLine","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.Reader, int, int)":{"name":"ReInit","returnType":"void","args":["java.io.Reader","int","int"],"exceptions":[]},"int getEndLine()":{"name":"getEndLine","returnType":"int","args":[],"exceptions":[]},"void ReInit(java.io.InputStream, java.lang.String, int, int, int) throws java.io.UnsupportedEncodingException":{"name":"ReInit","returnType":"void","args":["java.io.InputStream","java.lang.String","int","int","int"],"exceptions":["java.io.UnsupportedEncodingException"]},"int getBeginColumn()":{"name":"getBeginColumn","returnType":"int","args":[],"exceptions":[]},"char BeginToken() throws java.io.IOException":{"name":"BeginToken","returnType":"char","args":[],"exceptions":["java.io.IOException"]},"char readChar() throws 
java.io.IOException":{"name":"readChar","returnType":"char","args":[],"exceptions":["java.io.IOException"]},"void backup(int)":{"name":"backup","returnType":"void","args":["int"],"exceptions":[]},"void ReInit(java.io.Reader)":{"name":"ReInit","returnType":"void","args":["java.io.Reader"],"exceptions":[]}}},"org.apache.hadoop.io.EnumSetWritable":{"name":"org.apache.hadoop.io.EnumSetWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void set(java.util.EnumSet, java.lang.Class)":{"name":"set","returnType":"void","args":["java.util.EnumSet","java.lang.Class"],"exceptions":[]},"boolean add(java.lang.Enum)":{"name":"add","returnType":"boolean","args":["java.lang.Enum"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean add(java.lang.Object)":{"name":"add","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.util.EnumSet get()":{"name":"get","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Class getElementType()":{"name":"getElementType","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.PathFilter":{"name":"org.apache.hadoop.fs.PathFilter","methods":{"boolean accept(org.apache.hadoop.fs.Path)":{"name":"accept","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.io.BinaryComparable":{"name":"org.apache.hadoop.io.BinaryComparable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.BinaryComparable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.BinaryComparable"],"exceptions":[]},"int compareTo([B, int, int)":{"name":"compareTo","returnType":"int","args":["[B","int","int"],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.net.ConnectTimeoutException":{"name":"org.apache.hadoop.net.ConnectTimeoutException","methods":{}},"org.apache.hadoop.io.MapWritable":{"name":"org.apache.hadoop.io.MapWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"boolean containsKey(java.lang.Object)":{"name":"containsKey","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object remove(java.lang.Object)":{"name":"remove","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"java.lang.Object get(java.lang.Object)":{"name":"get","returnType":"java.lang.Object","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable put(org.apache.hadoop.io.Writable, 
org.apache.hadoop.io.Writable)":{"name":"put","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.io.Writable"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"boolean containsValue(java.lang.Object)":{"name":"containsValue","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable remove(java.lang.Object)":{"name":"remove","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Collection values()":{"name":"values","returnType":"java.util.Collection","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void putAll(java.util.Map)":{"name":"putAll","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.Object put(java.lang.Object, java.lang.Object)":{"name":"put","returnType":"java.lang.Object","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"boolean isEmpty()":{"name":"isEmpty","returnType":"boolean","args":[],"exceptions":[]},"java.util.Set entrySet()":{"name":"entrySet","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(java.lang.Object)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Object"],"exceptions":[]},"java.util.Set keySet()":{"name":"keySet","returnType":"java.util.Set","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws 
java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.ByteBufferPool":{"name":"org.apache.hadoop.io.ByteBufferPool","methods":{"void putBuffer(java.nio.ByteBuffer)":{"name":"putBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.nio.ByteBuffer getBuffer(boolean, int)":{"name":"getBuffer","returnType":"java.nio.ByteBuffer","args":["boolean","int"],"exceptions":[]}}},"org.apache.hadoop.io.DoubleWritable":{"name":"org.apache.hadoop.io.DoubleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"double get()":{"name":"get","returnType":"double","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.DoubleWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.DoubleWritable"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(double)":{"name":"set","returnType":"void","args":["double"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JBuffer":{"name":"org.apache.hadoop.record.compiler.JBuffer","methods":{}},"org.apache.hadoop.io.CompressedWritable":{"name":"org.apache.hadoop.io.CompressedWritable","methods":{"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.ByteWritable":{"name":"org.apache.hadoop.io.ByteWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte get()":{"name":"get","returnType":"byte","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.ByteWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ByteWritable"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"void set(byte)":{"name":"set","returnType":"void","args":["byte"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.ParentNotDirectoryException":{"name":"org.apache.hadoop.fs.ParentNotDirectoryException","methods":{}},"org.apache.hadoop.io.VersionMismatchException":{"name":"org.apache.hadoop.io.VersionMismatchException","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.io.BytesWritable":{"name":"org.apache.hadoop.io.BytesWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"[B 
get()":{"name":"get","returnType":"[B","args":[],"exceptions":[]},"int getCapacity()":{"name":"getCapacity","returnType":"int","args":[],"exceptions":[]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"void set(org.apache.hadoop.io.BytesWritable)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.BytesWritable"],"exceptions":[]},"void setCapacity(int)":{"name":"setCapacity","returnType":"void","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int getSize()":{"name":"getSize","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setSize(int)":{"name":"setSize","returnType":"void","args":["int"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.HadoopIllegalArgumentException":{"name":"org.apache.hadoop.HadoopIllegalArgumentException","methods":{}},"org.apache.hadoop.record.Record":{"name":"org.apache.hadoop.record.Record","methods":{"void deserialize(org.apache.hadoop.record.RecordInput) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput"],"exceptions":["java.io.IOException"]},"java.lang.String 
toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void serialize(org.apache.hadoop.record.RecordOutput) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput"],"exceptions":["java.io.IOException"]},"void serialize(org.apache.hadoop.record.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.RecordOutput":{"name":"org.apache.hadoop.record.RecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws 
java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, 
java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Closeable":{"name":"org.apache.hadoop.io.Closeable","methods":{}},"org.apache.hadoop.io.WritableFactory":{"name":"org.apache.hadoop.io.WritableFactory","methods":{"org.apache.hadoop.io.Writable newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.record.CsvRecordInput":{"name":"org.apache.hadoop.record.CsvRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws 
java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.util.ToolRunner":{"name":"org.apache.hadoop.util.ToolRunner","methods":{"void printGenericCommandUsage(java.io.PrintStream)":{"name":"printGenericCommandUsage","returnType":"void","args":["java.io.PrintStream"],"exceptions":[]},"boolean confirmPrompt(java.lang.String) throws java.io.IOException":{"name":"confirmPrompt","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int run(org.apache.hadoop.util.Tool, [Ljava.lang.String;) throws 
java.lang.Exception":{"name":"run","returnType":"int","args":["org.apache.hadoop.util.Tool","[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"int run(org.apache.hadoop.conf.Configuration, org.apache.hadoop.util.Tool, [Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.util.Tool","[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.tracing.SpanReceiverInfo":{"name":"org.apache.hadoop.tracing.SpanReceiverInfo","methods":{"long getId()":{"name":"getId","returnType":"long","args":[],"exceptions":[]},"java.lang.String getClassName()":{"name":"getClassName","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.util.bloom.RemoveScheme":{"name":"org.apache.hadoop.util.bloom.RemoveScheme","methods":{}},"org.apache.hadoop.record.Index":{"name":"org.apache.hadoop.record.Index","methods":{"boolean done()":{"name":"done","returnType":"boolean","args":[],"exceptions":[]},"void incr()":{"name":"incr","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.io.RawComparator":{"name":"org.apache.hadoop.io.RawComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]}}},"org.apache.hadoop.io.MD5Hash":{"name":"org.apache.hadoop.io.MD5Hash","methods":{"void set(org.apache.hadoop.io.MD5Hash)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.MD5Hash"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.io.MD5Hash read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.MD5Hash 
digest(java.lang.String)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.lang.String"],"exceptions":[]},"java.security.MessageDigest getDigester()":{"name":"getDigester","returnType":"java.security.MessageDigest","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest([B)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["[B"],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest(java.io.InputStream) throws java.io.IOException":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["java.io.InputStream"],"exceptions":["java.io.IOException"]},"void setDigest(java.lang.String)":{"name":"setDigest","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.MD5Hash digest(org.apache.hadoop.io.UTF8)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["org.apache.hadoop.io.UTF8"],"exceptions":[]},"int quarterDigest()":{"name":"quarterDigest","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.MD5Hash)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.MD5Hash"],"exceptions":[]},"long halfDigest()":{"name":"halfDigest","returnType":"long","args":[],"exceptions":[]},"[B getDigest()":{"name":"getDigest","returnType":"[B","args":[],"exceptions":[]},"org.apache.hadoop.io.MD5Hash digest([B, int, int)":{"name":"digest","returnType":"org.apache.hadoop.io.MD5Hash","args":["[B","int","int"],"exceptions":[]},"void 
readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.compiler.JMap":{"name":"org.apache.hadoop.record.compiler.JMap","methods":{}},"org.apache.hadoop.io.VIntWritable":{"name":"org.apache.hadoop.io.VIntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VIntWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VIntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.LocalFileSystem":{"name":"org.apache.hadoop.fs.LocalFileSystem","methods":{"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws 
java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.io.File pathToFile(org.apache.hadoop.fs.Path)":{"name":"pathToFile","returnType":"java.io.File","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLinkTarget(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean reportChecksumFailure(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FSDataInputStream, long, org.apache.hadoop.fs.FSDataInputStream, long)":{"name":"reportChecksumFailure","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FSDataInputStream","long","org.apache.hadoop.fs.FSDataInputStream","long"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem 
getRaw()":{"name":"getRaw","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":[]}}},"org.apache.hadoop.record.CsvRecordOutput":{"name":"org.apache.hadoop.record.CsvRecordOutput","methods":{"void writeBuffer(org.apache.hadoop.record.Buffer, java.lang.String) throws java.io.IOException":{"name":"writeBuffer","returnType":"void","args":["org.apache.hadoop.record.Buffer","java.lang.String"],"exceptions":["java.io.IOException"]},"void startMap(java.util.TreeMap, java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeBool(boolean, java.lang.String) throws java.io.IOException":{"name":"writeBool","returnType":"void","args":["boolean","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeFloat(float, java.lang.String) throws java.io.IOException":{"name":"writeFloat","returnType":"void","args":["float","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeInt(int, java.lang.String) throws java.io.IOException":{"name":"writeInt","returnType":"void","args":["int","java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(org.apache.hadoop.record.Record, java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["org.apache.hadoop.record.Record","java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.util.TreeMap, java.lang.String) throws 
java.io.IOException":{"name":"endMap","returnType":"void","args":["java.util.TreeMap","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeByte(byte, java.lang.String) throws java.io.IOException":{"name":"writeByte","returnType":"void","args":["byte","java.lang.String"],"exceptions":["java.io.IOException"]},"void startVector(java.util.ArrayList, java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"void","args":["java.util.ArrayList","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeDouble(double, java.lang.String) throws java.io.IOException":{"name":"writeDouble","returnType":"void","args":["double","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeLong(long, java.lang.String) throws java.io.IOException":{"name":"writeLong","returnType":"void","args":["long","java.lang.String"],"exceptions":["java.io.IOException"]},"void writeString(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableUtils":{"name":"org.apache.hadoop.io.WritableUtils","methods":{"int readVIntInRange(java.io.DataInput, int, int) throws java.io.IOException":{"name":"readVIntInRange","returnType":"int","args":["java.io.DataInput","int","int"],"exceptions":["java.io.IOException"]},"void writeVInt(java.io.DataOutput, int) throws java.io.IOException":{"name":"writeVInt","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; readStringArray(java.io.DataInput) throws java.io.IOException":{"name":"readStringArray","returnType":"[Ljava.lang.String;","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void cloneInto(org.apache.hadoop.io.Writable, org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"cloneInto","returnType":"void","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int getVIntSize(long)":{"name":"getVIntSize","returnType":"int","args":["long"],"exceptions":[]},"[B readCompressedByteArray(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedByteArray","returnType":"[B","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeCompressedStringArray(java.io.DataOutput, [Ljava.lang.String;) throws java.io.IOException":{"name":"writeCompressedStringArray","returnType":"void","args":["java.io.DataOutput","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void writeStringArray(java.io.DataOutput, [Ljava.lang.String;) throws java.io.IOException":{"name":"writeStringArray","returnType":"void","args":["java.io.DataOutput","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void writeString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"void","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void displayByteArray([B)":{"name":"displayByteArray","returnType":"void","args":["[B"],"exceptions":[]},"int writeCompressedString(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeCompressedString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; readCompressedStringArray(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedStringArray","returnType":"[Ljava.lang.String;","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"boolean isNegativeVInt(byte)":{"name":"isNegativeVInt","returnType":"boolean","args":["byte"],"exceptions":[]},"org.apache.hadoop.io.Writable clone(org.apache.hadoop.io.Writable, 
org.apache.hadoop.conf.Configuration)":{"name":"clone","returnType":"org.apache.hadoop.io.Writable","args":["org.apache.hadoop.io.Writable","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int decodeVIntSize(byte)":{"name":"decodeVIntSize","returnType":"int","args":["byte"],"exceptions":[]},"int readVInt(java.io.DataInput) throws java.io.IOException":{"name":"readVInt","returnType":"int","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int writeCompressedByteArray(java.io.DataOutput, [B) throws java.io.IOException":{"name":"writeCompressedByteArray","returnType":"int","args":["java.io.DataOutput","[B"],"exceptions":["java.io.IOException"]},"void writeEnum(java.io.DataOutput, java.lang.Enum) throws java.io.IOException":{"name":"writeEnum","returnType":"void","args":["java.io.DataOutput","java.lang.Enum"],"exceptions":["java.io.IOException"]},"[B toByteArray([Lorg.apache.hadoop.io.Writable;)":{"name":"toByteArray","returnType":"[B","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.String readString(java.io.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String readStringSafely(java.io.DataInput, int) throws java.io.IOException, java.lang.IllegalArgumentException":{"name":"readStringSafely","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException","java.lang.IllegalArgumentException"]},"java.lang.Enum readEnum(java.io.DataInput, java.lang.Class) throws java.io.IOException":{"name":"readEnum","returnType":"java.lang.Enum","args":["java.io.DataInput","java.lang.Class"],"exceptions":["java.io.IOException"]},"long readVLong(java.io.DataInput) throws java.io.IOException":{"name":"readVLong","returnType":"long","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void skipCompressedByteArray(java.io.DataInput) throws 
java.io.IOException":{"name":"skipCompressedByteArray","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void skipFully(java.io.DataInput, int) throws java.io.IOException":{"name":"skipFully","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readCompressedString(java.io.DataInput) throws java.io.IOException":{"name":"readCompressedString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"void writeVLong(java.io.DataOutput, long) throws java.io.IOException":{"name":"writeVLong","returnType":"void","args":["java.io.DataOutput","long"],"exceptions":["java.io.IOException"]}}}}} \ No newline at end of file diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list deleted file mode 100644 index ab6cd511..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list +++ /dev/null @@ -1,2 +0,0 @@ -rcc -hadoop diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list deleted file mode 100644 index 2edbd0f9..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list +++ /dev/null @@ -1,60 +0,0 @@ -api-util-1\.0\.0-M20[\.\-_].*jar -curator-recipes-2\.7\.1[\.\-_].*jar -curator-framework-2\.7\.1[\.\-_].*jar -netty-3\.6\.2\.Final[\.\-_].*jar -gson-2\.2\.4[\.\-_].*jar -paranamer-2\.3[\.\-_].*jar -jackson-core-asl-1\.9\.13[\.\-_].*jar -jackson-xc-1\.9\.13[\.\-_].*jar -jersey-server-1\.9[\.\-_].*jar -stax-api-1\.0-2[\.\-_].*jar -zookeeper-3\.4\.6[\.\-_].*jar -htrace-core-3\.1\.0-incubating[\.\-_].*jar -slf4j-api-1\.7\.10[\.\-_].*jar -avro-1\.7\.[4-7][\.\-_].*jar -slf4j-log4j12-1\.7\.10[\.\-_].*jar -curator-client-2\.7\.1[\.\-_].*jar -jets3t-0\.9\.0[\.\-_].*jar 
-commons-net-3\.1[\.\-_].*jar -jaxb-impl-2\.2\.3-1[\.\-_].*jar -httpclient-4\.[0-9]\.[0-9][\.\-_].*jar -apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar -commons-cli-1\.2[\.\-_].*jar -log4j-1\.2\.17[\.\-_].*jar -jackson-mapper-asl-1\.9\.13[\.\-_].*jar -java-xmlbuilder-0\.4[\.\-_].*jar -jsp-api-2\.1[\.\-_].*jar -guava-11\.0\.2[\.\-_].*jar -jetty-6\.1\.26[\.\-_].*jar -commons-logging-1\.1\.3[\.\-_].*jar -snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -commons-httpclient-3\.1[\.\-_].*jar -jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar -jersey-core-1\.9[\.\-_].*jar -commons-compress-1\.4\.1[\.\-_].*jar -jettison-1\.1[\.\-_].*jar -junit-4\.11[\.\-_].*jar -commons-collections-3\.2\.[12][\.\-_].*jar -xz-1\.0[\.\-_].*jar -asm-3\.2[\.\-_].*jar -commons-codec-1\.4[\.\-_].*jar -commons-digester-1\.8[\.\-_].*jar -api-asn1-api-1\.0\.0-M20[\.\-_].*jar -xmlenc-0\.52[\.\-_].*jar -commons-configuration-1\.6[\.\-_].*jar -mockito-all-1\.8\.5[\.\-_].*jar -commons-lang-2\.6[\.\-_].*jar -jetty-util-6\.1\.26[\.\-_].*jar -jsr305-3\.0\.0[\.\-_].*jar -protobuf-java-2\.5\.0[\.\-_].*jar -httpcore-4\.[0-9]\.[0-9][\.\-_].*jar -commons-io-2\.4[\.\-_].*jar -activation-1\.1[\.\-_].*jar -jersey-json-1\.9[\.\-_].*jar -jaxb-api-2\.2\.2[\.\-_].*jar -commons-math3-3\.1\.1[\.\-_].*jar -hamcrest-core-1\.3[\.\-_].*jar -commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar -apacheds-i18n-2\.0\.0-M15[\.\-_].*jar -servlet-api-2\.5[\.\-_].*jar -jackson-jaxrs-1\.9\.13[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list deleted file mode 100644 index 73ff182a..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list +++ /dev/null @@ -1,230 +0,0 @@ -bin -bin/rcc -bin/hadoop -sbin -sbin/hadoop-daemons\.sh -sbin/hadoop-daemon\.sh -sbin/slaves\.sh -hadoop-annotations-2\.7\.[0-9][\.\-_].*jar -hadoop-common-2\.7\.[0-9][\.\-_].*jar -hadoop-annotations[\.\-_].*jar 
-hadoop-common-2\.7\.[0-9].*-tests\.jar -etc -etc/hadoop -hadoop-common[\.\-_].*jar -hadoop-auth-2\.7\.[0-9][\.\-_].*jar -libexec -libexec/hdfs-config\.sh -libexec/hadoop-layout\.sh -libexec/yarn-config\.sh -libexec/mapred-config\.sh -libexec/hadoop-config\.sh -libexec/init-hdfs\.sh -hadoop-auth[\.\-_].*jar -hadoop-nfs[\.\-_].*jar -hadoop-nfs-2\.7\.[0-9][\.\-_].*jar -client -client/curator-recipes[\.\-_].*jar -client/curator-recipes-2\.7\.1[\.\-_].*jar -client/commons-configuration[\.\-_].*jar -client/jsr305[\.\-_].*jar -client/slf4j-log4j12[\.\-_].*jar -client/hadoop-mapreduce-client-core[\.\-_].*jar -client/hadoop-hdfs[\.\-_].*jar -client/commons-configuration-1\.6[\.\-_].*jar -client/commons-cli-1\.2[\.\-_].*jar -client/hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar -client/commons-digester-1\.8[\.\-_].*jar -client/curator-client-2\.7\.1[\.\-_].*jar -client/httpclient[\.\-_].*jar -client/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar -client/jsp-api-2\.1[\.\-_].*jar -client/leveldbjni-all-1\.8[\.\-_].*jar -client/slf4j-api-1\.7\.10[\.\-_].*jar -client/hadoop-annotations-2\.7\.[0-9][\.\-_].*jar -client/jersey-core[\.\-_].*jar -client/commons-compress[\.\-_].*jar -client/stax-api[\.\-_].*jar -client/jaxb-api-2\.2\.2[\.\-_].*jar -client/api-util-1\.0\.0-M20[\.\-_].*jar -client/jackson-xc[\.\-_].*jar -client/commons-cli[\.\-_].*jar -client/xml-apis[\.\-_].*jar -client/curator-client[\.\-_].*jar -client/curator-framework-2\.7\.1[\.\-_].*jar -client/commons-io-2\.4[\.\-_].*jar -client/jackson-core-asl[\.\-_].*jar -client/avro[\.\-_].*jar -client/hadoop-mapreduce-client-app[\.\-_].*jar -client/jetty-util[\.\-_].*jar -client/guava[\.\-_].*jar -client/commons-beanutils[\.\-_].*jar -client/apacheds-i18n[\.\-_].*jar -client/jetty-util-6\.1\.26[\.\-_].*jar -client/xercesImpl-2\.9\.1[\.\-_].*jar -client/commons-logging[\.\-_].*jar -client/slf4j-api[\.\-_].*jar -client/commons-digester[\.\-_].*jar -client/avro-1\.7\.[4-7][\.\-_].*jar 
-client/hadoop-common-2\.7\.[0-9][\.\-_].*jar -client/commons-math3[\.\-_].*jar -client/hadoop-yarn-common-2\.7\.[0-9][\.\-_].*jar -client/hadoop-annotations[\.\-_].*jar -client/xercesImpl[\.\-_].*jar -client/commons-codec[\.\-_].*jar -client/netty-3\.6\.2\.Final[\.\-_].*jar -client/commons-collections[\.\-_].*jar -client/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar -client/hadoop-mapreduce-client-jobclient[\.\-_].*jar -client/htrace-core[\.\-_].*jar -client/jersey-core-1\.9[\.\-_].*jar -client/xz[\.\-_].*jar -client/jackson-mapper-asl-1\.9\.13[\.\-_].*jar -client/jsp-api[\.\-_].*jar -client/commons-httpclient[\.\-_].*jar -client/netty[\.\-_].*jar -client/hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar -client/commons-net[\.\-_].*jar -client/hadoop-yarn-server-common[\.\-_].*jar -client/jaxb-api[\.\-_].*jar -client/apacheds-kerberos-codec[\.\-_].*jar -client/httpcore[\.\-_].*jar -client/hadoop-yarn-server-common-2\.7\.[0-9][\.\-_].*jar -client/hadoop-common[\.\-_].*jar -client/leveldbjni-all[\.\-_].*jar -client/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -client/gson-2\.2\.4[\.\-_].*jar -client/commons-net-3\.1[\.\-_].*jar -client/api-util[\.\-_].*jar -client/commons-compress-1\.4\.1[\.\-_].*jar -client/jackson-xc-1\.9\.13[\.\-_].*jar -client/netty-all-4\.0\.23\.Final[\.\-_].*jar -client/xmlenc-0\.52[\.\-_].*jar -client/jackson-jaxrs[\.\-_].*jar -client/api-asn1-api[\.\-_].*jar -client/api-asn1-api-1\.0\.0-M20[\.\-_].*jar -client/commons-codec-1\.4[\.\-_].*jar -client/jackson-core-asl-1\.9\.13[\.\-_].*jar -client/servlet-api-2\.5[\.\-_].*jar -client/commons-beanutils(-core)?[\.\-_].*jar -client/paranamer-2\.3[\.\-_].*jar -client/hadoop-yarn-api-2\.7\.[0-9][\.\-_].*jar -client/hadoop-mapreduce-client-shuffle[\.\-_].*jar -client/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar -client/hadoop-yarn-common[\.\-_].*jar -client/hadoop-auth-2\.7\.[0-9][\.\-_].*jar -client/snappy-java[\.\-_].*jar -client/gson[\.\-_].*jar -client/xml-apis-1\.3\.04[\.\-_].*jar 
-client/commons-io[\.\-_].*jar -client/commons-math3-3\.1\.1[\.\-_].*jar -client/log4j[\.\-_].*jar -client/hadoop-auth[\.\-_].*jar -client/log4j-1\.2\.17[\.\-_].*jar -client/servlet-api[\.\-_].*jar -client/hadoop-hdfs-2\.7\.[0-9][\.\-_].*jar -client/activation[\.\-_].*jar -client/zookeeper[\.\-_].*jar -client/xmlenc[\.\-_].*jar -client/stax-api-1\.0-2[\.\-_].*jar -client/hadoop-yarn-client-2\.7\.[0-9][\.\-_].*jar -client/jersey-client-1\.9[\.\-_].*jar -client/hadoop-mapreduce-client-common[\.\-_].*jar -client/xz-1\.0[\.\-_].*jar -client/zookeeper-3\.4\.6[\.\-_].*jar -client/activation-1\.1[\.\-_].*jar -client/hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar -client/htrace-core-3\.1\.0-incubating[\.\-_].*jar -client/protobuf-java-2\.5\.0[\.\-_].*jar -client/hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar -client/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar -client/commons-lang[\.\-_].*jar -client/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar -client/paranamer[\.\-_].*jar -client/hadoop-yarn-api[\.\-_].*jar -client/jersey-client[\.\-_].*jar -client/hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar -client/curator-framework[\.\-_].*jar -client/guava-11\.0\.2[\.\-_].*jar -client/jsr305-3\.0\.0[\.\-_].*jar -client/hadoop-yarn-client[\.\-_].*jar -client/jackson-jaxrs-1\.9\.13[\.\-_].*jar -client/commons-httpclient-3\.1[\.\-_].*jar -client/commons-collections-3\.2\.[12][\.\-_].*jar -client/netty-all[\.\-_].*jar -client/slf4j-log4j12-1\.7\.10[\.\-_].*jar -client/protobuf-java[\.\-_].*jar -client/jackson-mapper-asl[\.\-_].*jar -client/commons-logging-1\.1\.3[\.\-_].*jar -client/commons-lang-2\.6[\.\-_].*jar -lib -lib/curator-recipes-2\.7\.1[\.\-_].*jar -lib/commons-configuration-1\.6[\.\-_].*jar -lib/commons-cli-1\.2[\.\-_].*jar -lib/commons-digester-1\.8[\.\-_].*jar -lib/curator-client-2\.7\.1[\.\-_].*jar -lib/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar -lib/jsp-api-2\.1[\.\-_].*jar -lib/jets3t-0\.9\.0[\.\-_].*jar -lib/slf4j-api-1\.7\.10[\.\-_].*jar 
-lib/jaxb-api-2\.2\.2[\.\-_].*jar -lib/api-util-1\.0\.0-M20[\.\-_].*jar -lib/jettison-1\.1[\.\-_].*jar -lib/curator-framework-2\.7\.1[\.\-_].*jar -lib/commons-io-2\.4[\.\-_].*jar -lib/jetty-util-6\.1\.26[\.\-_].*jar -lib/avro-1\.7\.[4-7][\.\-_].*jar -lib/jaxb-impl-2\.2\.3-1[\.\-_].*jar -lib/netty-3\.6\.2\.Final[\.\-_].*jar -lib/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar -lib/jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar -lib/jersey-core-1\.9[\.\-_].*jar -lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar -lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -lib/gson-2\.2\.4[\.\-_].*jar -lib/commons-net-3\.1[\.\-_].*jar -lib/asm-3\.2[\.\-_].*jar -lib/commons-compress-1\.4\.1[\.\-_].*jar -lib/mockito-all-1\.8\.5[\.\-_].*jar -lib/jackson-xc-1\.9\.13[\.\-_].*jar -lib/junit-4\.11[\.\-_].*jar -lib/jersey-json-1\.9[\.\-_].*jar -lib/xmlenc-0\.52[\.\-_].*jar -lib/api-asn1-api-1\.0\.0-M20[\.\-_].*jar -lib/commons-codec-1\.4[\.\-_].*jar -lib/jackson-core-asl-1\.9\.13[\.\-_].*jar -lib/servlet-api-2\.5[\.\-_].*jar -lib/paranamer-2\.3[\.\-_].*jar -lib/native -lib/native/libhadoop\.a -lib/native/libhadoop\.so -lib/native/libhdfs\.a -lib/native/libsnappy\.so[.0-9]* -lib/native/libsnappy\.so -lib/native/libhadoop\.so[.0-9]* -lib/native/libhadooputils\.a -lib/native/libsnappy\.so[.0-9]* -lib/native/libhadooppipes\.a -lib/jetty-6\.1\.26[\.\-_].*jar -lib/jersey-server-1\.9[\.\-_].*jar -lib/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar -lib/commons-math3-3\.1\.1[\.\-_].*jar -lib/log4j-1\.2\.17[\.\-_].*jar -lib/hamcrest-core-1\.3[\.\-_].*jar -lib/stax-api-1\.0-2[\.\-_].*jar -lib/xz-1\.0[\.\-_].*jar -lib/zookeeper-3\.4\.6[\.\-_].*jar -lib/activation-1\.1[\.\-_].*jar -lib/htrace-core-3\.1\.0-incubating[\.\-_].*jar -lib/protobuf-java-2\.5\.0[\.\-_].*jar -lib/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar -lib/java-xmlbuilder-0\.4[\.\-_].*jar -lib/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar -lib/guava-11\.0\.2[\.\-_].*jar -lib/jsr305-3\.0\.0[\.\-_].*jar -lib/jackson-jaxrs-1\.9\.13[\.\-_].*jar 
-lib/commons-httpclient-3\.1[\.\-_].*jar -lib/commons-collections-3\.2\.[12][\.\-_].*jar -lib/slf4j-log4j12-1\.7\.10[\.\-_].*jar -lib/commons-logging-1\.1\.3[\.\-_].*jar -lib/commons-lang-2\.6[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json deleted file mode 100644 index b5e22655..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-hdfs","version":"2.7.3","classes":{"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean","methods":{"long getTotal()":{"name":"getTotal","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDeadNodes()":{"name":"getDeadNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"int getDistinctVersionCount()":{"name":"getDistinctVersionCount","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean getRollingUpgradeStatus()":{"name":"getRollingUpgradeStatus","returnType":"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Map getDistinctVersions()":{"name":"getDistinctVersions","returnType":"java.util.Map","args":[],"exceptions":[]},"int getThreads()":{"name":"getThreads","returnType":"int","args":[],"exceptions":[]},"java.lang.String getJournalTransactionInfo()":{"name":"getJournalTransactionInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentBlockPoolUsed()":{"name":"getPercentBlockPoolUsed","returnType":"float","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String 
getLiveNodes()":{"name":"getLiveNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockPoolUsedSpace()":{"name":"getBlockPoolUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSafemode()":{"name":"getSafemode","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCorruptFiles()":{"name":"getCorruptFiles","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getSoftwareVersion()":{"name":"getSoftwareVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"long getTotalFiles()":{"name":"getTotalFiles","returnType":"long","args":[],"exceptions":[]},"long getCacheUsed()":{"name":"getCacheUsed","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameDirStatuses()":{"name":"getNameDirStatuses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCompileInfo()":{"name":"getCompileInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNodeUsage()":{"name":"getNodeUsage","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNumberOfMissingBlocksWithReplicationFactorOne()":{"name":"getNumberOfMissingBlocksWithReplicationFactorOne","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameJournalStatus()":{"name":"getNameJournalStatus","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNonDfsUsedSpace()":{"name":"getNonDfsUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNNStarted()":{"name":"getNNStarted","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentRemaining()":{"name":"getPercentRemaining","returnType":"float","args":[],"exceptions":[]},"boolean isUpgradeFinalized()":{"name":"isUpgradeFinalized","returnType":"boolean","args":[],"exceptions":[]},"long getTotalBlocks()":{"name":"getTotalBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String 
getBlockPoolId()":{"name":"getBlockPoolId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getNumberOfMissingBlocks()":{"name":"getNumberOfMissingBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDecomNodes()":{"name":"getDecomNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getFree()":{"name":"getFree","returnType":"long","args":[],"exceptions":[]},"float getPercentUsed()":{"name":"getPercentUsed","returnType":"float","args":[],"exceptions":[]},"long getCacheCapacity()":{"name":"getCacheCapacity","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean","methods":{"java.util.Map getDatanodeNetworkCounts()":{"name":"getDatanodeNetworkCounts","returnType":"java.util.Map","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"int getXceiverCount()":{"name":"getXceiverCount","returnType":"int","args":[],"exceptions":[]},"java.lang.String getHttpPort()":{"name":"getHttpPort","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNamenodeAddresses()":{"name":"getNamenodeAddresses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVolumeInfo()":{"name":"getVolumeInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getRpcPort()":{"name":"getRpcPort","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.UnknownCipherSuiteException":{"name":"org.apache.hadoop.hdfs.UnknownCipherSuiteException","methods":{}}}} \ No newline at end of file diff --git 
a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list deleted file mode 100644 index 88879870..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list +++ /dev/null @@ -1 +0,0 @@ -hdfs diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list deleted file mode 100644 index 8355c581..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list +++ /dev/null @@ -1,25 +0,0 @@ -netty-3\.6\.2\.Final[\.\-_].*jar -leveldbjni-all-1\.8[\.\-_].*jar -jackson-core-asl-1\.9\.13[\.\-_].*jar -jersey-server-1\.9[\.\-_].*jar -htrace-core-3\.1\.0-incubating[\.\-_].*jar -commons-daemon-1\.0\.13[\.\-_].*jar -commons-cli-1\.2[\.\-_].*jar -log4j-1\.2\.17[\.\-_].*jar -jackson-mapper-asl-1\.9\.13[\.\-_].*jar -guava-11\.0\.2[\.\-_].*jar -jetty-6\.1\.26[\.\-_].*jar -commons-logging-1\.1\.3[\.\-_].*jar -jersey-core-1\.9[\.\-_].*jar -asm-3\.2[\.\-_].*jar -commons-codec-1\.4[\.\-_].*jar -xml-apis-1\.3\.04[\.\-_].*jar -xercesImpl-2\.9\.1[\.\-_].*jar -xmlenc-0\.52[\.\-_].*jar -commons-lang-2\.6[\.\-_].*jar -netty-all-4\.0\.23\.Final[\.\-_].*jar -jetty-util-6\.1\.26[\.\-_].*jar -jsr305-3\.0\.0[\.\-_].*jar -protobuf-java-2\.5\.0[\.\-_].*jar -commons-io-2\.4[\.\-_].*jar -servlet-api-2\.5[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list deleted file mode 100644 index 12565fd2..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list +++ /dev/null @@ -1,79 +0,0 @@ -webapps -webapps/journal -webapps/journal/index\.html -webapps/journal/WEB-INF -webapps/journal/WEB-INF/web\.xml -webapps/secondary -webapps/secondary/index\.html -webapps/secondary/status\.html -webapps/secondary/WEB-INF 
-webapps/secondary/WEB-INF/web\.xml -webapps/secondary/snn\.js -webapps/hdfs -webapps/hdfs/dfshealth\.html -webapps/hdfs/index\.html -webapps/hdfs/explorer\.js -webapps/hdfs/dfshealth\.js -webapps/hdfs/WEB-INF -webapps/hdfs/WEB-INF/web\.xml -webapps/hdfs/explorer\.html -webapps/datanode -webapps/datanode/index\.html -webapps/datanode/robots\.txt -webapps/datanode/WEB-INF -webapps/datanode/WEB-INF/web\.xml -webapps/nfs3 -webapps/nfs3/WEB-INF -webapps/nfs3/WEB-INF/web\.xml -webapps/static -webapps/static/hadoop\.css -webapps/static/bootstrap-3\.0\.2 -webapps/static/bootstrap-3\.0\.2/fonts -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.svg -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.eot -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.woff -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.ttf -webapps/static/bootstrap-3\.0\.2/css -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.css -webapps/static/bootstrap-3\.0\.2/js -webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.js -webapps/static/jquery-1\.10\.([2-9]|[3-9]\d+).*\.js -webapps/static/dust-helpers-1\.1\.([1-9]|[2-9]\d+).*\.js -webapps/static/dust-full-2\.0\.\d+.*\.js -webapps/static/dfs-dust\.js -hadoop-hdfs\.jar -bin -bin/hdfs -sbin -sbin/distribute-exclude\.sh -sbin/refresh-namenodes\.sh -hadoop-hdfs-nfs-2\.7\.([1-9]|[2-9]\d+).*\.jar -hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar -hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar -hadoop-hdfs-nfs\.jar -lib -lib/commons-daemon-1\.0\.(1[3-9]|[2-9]\d).*\.jar -lib/commons-cli-1\.([2-9]|[3-9]\d+).*\.jar -lib/leveldbjni-all-1\.([8-9]|[9-9]\d+).*\.jar -lib/commons-io-2\.([4-9]|[5-9]\d+).*\.jar -lib/jetty-util-6\.1\.(2[6-9]|[3-9]\d).*\.jar -lib/xercesImpl-2\.9\.([1-9]|[2-9]\d+).*\.jar -lib/netty-3\.6\.([2-9]|[3-9]\d+).*\.jar -lib/jersey-core-1\.(9|[1-9]\d+).*\.jar -lib/jackson-mapper-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar -lib/asm-3\.([2-9]|[3-9]\d+).*\.jar -lib/netty-all-4\.0\.(2[3-9]|[3-9]\d).*\.jar -lib/xmlenc-0\.(5[2-9]|[6-9]\d).*\.jar 
-lib/commons-codec-1\.([4-9]|[5-9]\d+).*\.jar -lib/jackson-core-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar -lib/servlet-api-2\.([5-9]|[6-9]\d+).*\.jar -lib/jetty-6\.1\.(2[6-9]|[3-9]\d).*\.jar -lib/jersey-server-1\.(9|[1-9]\d+).*\.jar -lib/xml-apis-1\.3\.(0[4-9]|[1-9]\d).*\.jar -lib/log4j-1\.2\.(1[7-9]|[2-9]\d).*\.jar -lib/htrace-core-3\.1\.\d+.*\.jar -lib/protobuf-java-2\.5\.\d+.*\.jar -lib/guava-11\.0\.([2-9]|[3-9]\d+).*\.jar -lib/jsr305-3\.0\.\d+.*\.jar -lib/commons-logging-1\.1\.([3-9]|[4-9]\d+).*\.jar -lib/commons-lang-2\.([6-9]|[7-9]\d+).*\.jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list deleted file mode 100644 index 0a7a9c57..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list +++ /dev/null @@ -1 +0,0 @@ -mapred diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json deleted file mode 100644 index 6061c5ea..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-mapreduce-client-core","version":"2.7.3","classes":{"org.apache.hadoop.mapred.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapred.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, 
int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configuration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileSplit":{"name":"org.apache.hadoop.mapred.lib.CombineFileSplit","methods":{"org.apache.hadoop.mapred.JobConf getJob()":{"name":"getJob","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void 
reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.HashPartitioner":{"name":"org.apache.hadoop.mapred.lib.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputFormat":{"name":"org.apache.hadoop.mapreduce.OutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.CounterGroup":{"name":"org.apache.hadoop.mapreduce.CounterGroup","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void setAggregatorDescriptors(org.apache.hadoop.mapred.JobConf, [Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Ljava.lang.Class;"],"exceptions":[]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void main([Ljava.lang.String;) throws java.io.IOException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws 
java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.InvalidInputException":{"name":"org.apache.hadoop.mapreduce.lib.input.InvalidInputException","methods":{"java.util.List getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters$Counter":{"name":"org.apache.hadoop.mapred.Counters$Counter","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean 
contentEquals(org.apache.hadoop.mapred.Counters$Counter)":{"name":"contentEquals","returnType":"boolean","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getCounter()":{"name":"getCounter","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object 
createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LongSumReducer":{"name":"org.apache.hadoop.mapred.lib.LongSumReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength(int)":{"name":"getLength","returnType":"long","args":["int"],"exceptions":[]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getPaths()":{"name":"getPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long 
getOffset(int)":{"name":"getOffset","returnType":"long","args":["int"],"exceptions":[]},"org.apache.hadoop.fs.Path getPath(int)":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":["int"],"exceptions":[]},"[J getLengths()":{"name":"getLengths","returnType":"[J","args":[],"exceptions":[]},"[J getStartOffsets()":{"name":"getStartOffsets","returnType":"[J","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumPaths()":{"name":"getNumPaths","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","methods":{"java.lang.String getInputQuery()":{"name":"getInputQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInputClass(java.lang.Class)":{"name":"setInputClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setOutputFieldCount(int)":{"name":"setOutputFieldCount","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getInputTableName()":{"name":"getInputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"[Ljava.lang.String; getInputFieldNames()":{"name":"getInputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setOutputTableName(java.lang.String)":{"name":"setOutputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.sql.Connection getConnection() throws java.sql.SQLException, java.lang.ClassNotFoundException":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":["java.sql.SQLException","java.lang.ClassNotFoundException"]},"java.lang.String 
getInputBoundingQuery()":{"name":"getInputBoundingQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getInputOrderBy()":{"name":"getInputOrderBy","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getInputClass()":{"name":"getInputClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setInputTableName(java.lang.String)":{"name":"setInputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputCountQuery(java.lang.String)":{"name":"setInputCountQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputOrderBy(java.lang.String)":{"name":"setInputOrderBy","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getOutputFieldCount()":{"name":"getOutputFieldCount","returnType":"int","args":[],"exceptions":[]},"void setInputConditions(java.lang.String)":{"name":"setInputConditions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputQuery(java.lang.String)":{"name":"setInputQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getInputConditions()":{"name":"getInputConditions","returnType":"java.lang.String","args":[],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String"],"exceptions":[]},"void setInputBoundingQuery(java.lang.String)":{"name":"setInputBoundingQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setInputFieldNames([Ljava.lang.String;)":{"name":"setInputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"[Ljava.lang.String; getOutputFieldNames()":{"name":"getOutputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.String getOutputTableName()":{"name":"getOutputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setOutputFieldNames([Ljava.lang.String;)":{"name":"setOutputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"java.lang.String getInputCountQuery()":{"name":"getInputCountQuery","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.Partitioner":{"name":"org.apache.hadoop.mapred.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getCurrentValue()":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"float getProgress() throws 
java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void setKeyValue(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, [B, int, int)":{"name":"setKeyValue","returnType":"void","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text","[B","int","int"],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainMapper":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordReader":{"name":"org.apache.hadoop.mapred.RecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileOutputFormat":{"name":"org.apache.hadoop.mapred.FileOutputFormat","methods":{"void setOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getTaskOutputPath(org.apache.hadoop.mapred.JobConf, java.lang.String) throws java.io.IOException":{"name":"getTaskOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":["java.io.IOException"]},"void setOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void 
setCompressOutput(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"java.lang.String getUniqueName(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getUniqueName","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getPathForCustomFile(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getPathForCustomFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setWorkOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setWorkOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean getCompressOutput(org.apache.hadoop.mapred.JobConf)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter 
getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.conf.Configuration 
getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader) throws java.lang.InterruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.JoinRecordReader":{"name":"org.apache.hadoop.mapred.join.JoinRecordReader","methods":{"org.apache.hadoop.mapred.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapred.join.TupleWritable","args":[],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.mapred.join.TupleWritable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.mapred.join.TupleWritable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapreduce.Job, 
java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.TextOutputFormat":{"name":"org.apache.hadoop.mapred.TextOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.pipes.Submitter":{"name":"org.apache.hadoop.mapred.pipes.Submitter","methods":{"boolean getKeepCommandFile(org.apache.hadoop.mapred.JobConf)":{"name":"getKeepCommandFile","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob jobSubmit(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"jobSubmit","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setIsJavaMapper(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordWriter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean getIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordWriter","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaReducer(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaReducer","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void 
setIsJavaRecordReader(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordReader","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.String getExecutable(org.apache.hadoop.mapred.JobConf)":{"name":"getExecutable","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void setKeepCommandFile(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setKeepCommandFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaReducer(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setExecutable(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"setExecutable","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"boolean getIsJavaMapper(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaMapper","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaRecordReader(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordReader","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, 
org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.ClusterStatus":{"name":"org.apache.hadoop.mapred.ClusterStatus","methods":{"int getTaskTrackers()":{"name":"getTaskTrackers","returnType":"int","args":[],"exceptions":[]},"int getMaxMapTasks()":{"name":"getMaxMapTasks","returnType":"int","args":[],"exceptions":[]},"long getMaxMemory()":{"name":"getMaxMemory","returnType":"long","args":[],"exceptions":[]},"int getMaxReduceTasks()":{"name":"getMaxReduceTasks","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getGraylistedTrackerNames()":{"name":"getGraylistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus 
getJobTrackerStatus()":{"name":"getJobTrackerStatus","returnType":"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus","args":[],"exceptions":[]},"int getReduceTasks()":{"name":"getReduceTasks","returnType":"int","args":[],"exceptions":[]},"int getGraylistedTrackers()":{"name":"getGraylistedTrackers","returnType":"int","args":[],"exceptions":[]},"long getTTExpiryInterval()":{"name":"getTTExpiryInterval","returnType":"long","args":[],"exceptions":[]},"long getUsedMemory()":{"name":"getUsedMemory","returnType":"long","args":[],"exceptions":[]},"java.util.Collection getActiveTrackerNames()":{"name":"getActiveTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getMapTasks()":{"name":"getMapTasks","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobTracker$State getJobTrackerState()":{"name":"getJobTrackerState","returnType":"org.apache.hadoop.mapred.JobTracker$State","args":[],"exceptions":[]},"int getBlacklistedTrackers()":{"name":"getBlacklistedTrackers","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getBlacklistedTrackerNames()":{"name":"getBlacklistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.util.Collection getBlackListedTrackersInfo()":{"name":"getBlackListedTrackersInfo","returnType":"java.util.Collection","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumExcludedNodes()":{"name":"getNumExcludedNodes","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.MapReduceBase":{"name":"org.apache.hadoop.mapred.MapReduceBase","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.TupleWritable":{"name":"org.apache.hadoop.mapred.join.TupleWritable","methods":{}},"org.apache.hadoop.mapred.ID":{"name":"org.apache.hadoop.mapred.ID","methods":{}},"org.apache.hadoop.mapred.lib.RegexMapper":{"name":"org.apache.hadoop.mapred.lib.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String, long)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","args":["java.lang.String","long"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void addNextValue(double)":{"name":"addNextValue","returnType":"void","args":["double"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String 
getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"double getSum()":{"name":"getSum","returnType":"double","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputSplit":{"name":"org.apache.hadoop.mapreduce.InputSplit","methods":{"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapred.lib.TotalOrderPartitioner","methods":{"void 
setPartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.mapred.JobConf)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Counter":{"name":"org.apache.hadoop.mapreduce.Counter","methods":{"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapreduce.lib.db.DBConfiguration getDBConf()":{"name":"getDBConf","returnType":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.sql.Connection createConnection()":{"name":"createConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.sql.Connection getConnection()":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"java.lang.String getDBProductName()":{"name":"getDBProductName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapred.join.CompositeInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.String, java.lang.Class, [Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setFormat(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Partitioner":{"name":"org.apache.hadoop.mapreduce.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext)":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter 
getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent$Status":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","methods":{"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.JobContext":{"name":"org.apache.hadoop.mapred.JobContext","methods":{"org.apache.hadoop.util.Progressable getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf 
getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputCommitter":{"name":"org.apache.hadoop.mapreduce.OutputCommitter","methods":{"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws 
java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram","methods":{}},"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, 
org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.InputSplit":{"name":"org.apache.hadoop.mapred.InputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"long getSum()":{"name":"getSum","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue()":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey()":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader","methods":{"org.apache.hadoop.io.Writable 
createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration setAggregatorDescriptors([Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"org.apache.hadoop.conf.Configuration","args":["[Ljava.lang.Class;"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.InterruptedException, java.io.IOException, java.lang.ClassNotFoundException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob(org.apache.hadoop.conf.Configuration, [Ljava.lang.String;) throws 
java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMin","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueSum","methods":{}},"org.apache.hadoop.mapred.JobID":{"name":"org.apache.hadoop.mapred.JobID","methods":{"java.lang.String getJobIDsPattern(java.lang.String, java.lang.Integer)":{"name":"getJobIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapred.JobID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.JobID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID downgrade(org.apache.hadoop.mapreduce.JobID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobID","args":["org.apache.hadoop.mapreduce.JobID"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.FileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptions":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws 
java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMax","methods":{}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"java.lang.Class 
getSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.Reducer":{"name":"org.apache.hadoop.mapred.Reducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.JobControl":{"name":"org.apache.hadoop.mapred.jobcontrol.JobControl","methods":{"java.util.ArrayList getReadyJobs()":{"name":"getReadyJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getFailedJobs()":{"name":"getFailedJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getSuccessfulJobs()":{"name":"getSuccessfulJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getWaitingJobs()":{"name":"getWaitingJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getRunningJobs()":{"name":"getRunningJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addJobs(java.util.Collection)":{"name":"addJobs","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ResetableIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ResetableIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws 
java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapred.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocation(int) throws java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapred.InputSplit) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.InputSplit"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapred.InputSplit","args":["int"],"exceptions":[]},"long getLength(int) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.BinaryPartitioner":{"name":"org.apache.hadoop.mapred.lib.BinaryPartitioner","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator","methods":{"void setKeyFieldComparatorOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.MultiFileSplit":{"name":"org.apache.hadoop.mapred.MultiFileSplit","methods":{"[Ljava.lang.String; getLocations() throws 
java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobQueueInfo":{"name":"org.apache.hadoop.mapred.JobQueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJobStatuses([Lorg.apache.hadoop.mapreduce.JobStatus;)":{"name":"setJobStatuses","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":[]},"void setChildren(java.util.List)":{"name":"setChildren","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getChildren()":{"name":"getChildren","returnType":"java.util.List","args":[],"exceptions":[]},"void setQueueState(java.lang.String)":{"name":"setQueueState","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getQueueState()":{"name":"getQueueState","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setProperties(java.util.Properties)":{"name":"setProperties","returnType":"void","args":["java.util.Properties"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, [Ljava.lang.String;)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws 
java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, int)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","int"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordWriter":{"name":"org.apache.hadoop.mapred.RecordWriter","methods":{"void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"close","returnType":"void","args":["org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"org.apache.hadoop.mapred.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader","methods":{"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.TupleWritable 
createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.TupleWritable":{"name":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(int)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean has(int)":{"name":"has","returnType":"boolean","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineTextInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.IdentityReducer":{"name":"org.apache.hadoop.mapred.lib.IdentityReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskID":{"name":"org.apache.hadoop.mapreduce.TaskID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType(char)":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["char"],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getAllTaskTypes()":{"name":"getAllTaskTypes","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"char getRepresentingCharacter(org.apache.hadoop.mapreduce.TaskType)":{"name":"getRepresentingCharacter","returnType":"char","args":["org.apache.hadoop.mapreduce.TaskType"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskID forName(java.lang.String) throws 
java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.filecache.DistributedCache":{"name":"org.apache.hadoop.filecache.DistributedCache","methods":{"void setLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"long getTimestamp(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getTimestamp","returnType":"long","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOException"]},"void setFileTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setFileTimestamps","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void addLocalFiles(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"addLocalFiles","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void createAllSymlink(org.apache.hadoop.conf.Configuration, java.io.File, 
java.io.File) throws java.io.IOException":{"name":"createAllSymlink","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.File","java.io.File"],"exceptions":["java.io.IOException"]},"void setArchiveTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setArchiveTimestamps","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void addLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"addLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"void setLocalFiles(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalFiles","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.Job":{"name":"org.apache.hadoop.mapred.jobcontrol.Job","methods":{"java.lang.String getMapredJobID()":{"name":"getMapredJobID","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMapredJobID(java.lang.String)":{"name":"setMapredJobID","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.JobID getAssignedJobID()":{"name":"getAssignedJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"void setJobConf(org.apache.hadoop.mapred.JobConf)":{"name":"setJobConf","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.ArrayList getDependingJobs()":{"name":"getDependingJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf 
getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]},"boolean addDependingJob(org.apache.hadoop.mapred.jobcontrol.Job)":{"name":"addDependingJob","returnType":"boolean","args":["org.apache.hadoop.mapred.jobcontrol.Job"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]},"void setAssignedJobID(org.apache.hadoop.mapred.JobID)":{"name":"setAssignedJobID","returnType":"void","args":["org.apache.hadoop.mapred.JobID"],"exceptions":[]},"org.apache.hadoop.mapred.JobClient getJobClient()":{"name":"getJobClient","returnType":"org.apache.hadoop.mapred.JobClient","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.InputSampler":{"name":"org.apache.hadoop.mapreduce.lib.partition.InputSampler","methods":{"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"void writePartitionFile(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.mapreduce.lib.partition.InputSampler$Sampler) throws java.lang.InterruptedException, java.io.IOException, 
java.lang.ClassNotFoundException":{"name":"writePartitionFile","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.mapreduce.lib.partition.InputSampler$Sampler"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.mapred.lib.db.DBWritable":{"name":"org.apache.hadoop.mapred.lib.db.DBWritable","methods":{}},"org.apache.hadoop.mapreduce.lib.join.MultiFilterRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.MultiFilterRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsBinaryOutputFormat","methods":{"void setSequenceFileOutputValueClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputKeyClass(org.apache.hadoop.mapred.JobConf)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws 
java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapred.JobConf)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.UniqValueCount":{"name":"org.apache.hadoop.mapred.lib.aggregate.UniqValueCount","methods":{}},"org.apache.hadoop.mapred.lib.MultipleOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobStatus":{"name":"org.apache.hadoop.mapred.JobStatus","methods":{"float mapProgress()":{"name":"mapProgress","returnType":"float","args":[],"exceptions":[]},"float setupProgress()":{"name":"setupProgress","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority getJobPriority()":{"name":"getJobPriority","returnType":"org.apache.hadoop.mapred.JobPriority","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"void setRunState(int)":{"name":"setRunState","returnType":"void","args":["int"],"exceptions":[]},"float cleanupProgress()":{"name":"cleanupProgress","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobStatus downgrade(org.apache.hadoop.mapreduce.JobStatus)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobStatus","args":["org.apache.hadoop.mapreduce.JobStatus"],"exceptions":[]},"int 
getRunState()":{"name":"getRunState","returnType":"int","args":[],"exceptions":[]},"float reduceProgress()":{"name":"reduceProgress","returnType":"float","args":[],"exceptions":[]},"void setFailureInfo(java.lang.String)":{"name":"setFailureInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"void setJobPriority(org.apache.hadoop.mapred.JobPriority)":{"name":"setJobPriority","returnType":"void","args":["org.apache.hadoop.mapred.JobPriority"],"exceptions":[]},"java.lang.String getJobRunState(int)":{"name":"getJobRunState","returnType":"java.lang.String","args":["int"],"exceptions":[]},"java.lang.String getJobId()":{"name":"getJobId","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.join.ComposableInputFormat":{"name":"org.apache.hadoop.mapred.join.ComposableInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.WrappedRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.WrappedRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws 
java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.WritableComparable getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskAttemptContext":{"name":"org.apache.hadoop.mapred.TaskAttemptContext","methods":{"org.apache.hadoop.util.Progressable 
getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID getTaskAttemptID()":{"name":"getTaskAttemptID","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf getJobConf()":{"name":"getJobConf","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Reducer":{"name":"org.apache.hadoop.mapreduce.Reducer","methods":{"void run(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocation(int) throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.InputSplit) throws java.lang.InterruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long 
getLength(int) throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapreduce.InputSplit","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskID":{"name":"org.apache.hadoop.mapred.TaskID","methods":{"org.apache.hadoop.mapred.TaskID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.TaskID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"java.lang.String getTaskIDsPattern(java.lang.String, java.lang.Integer, java.lang.Boolean, java.lang.Integer)":{"name":"getTaskIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","java.lang.Boolean","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.TaskID downgrade(org.apache.hadoop.mapreduce.TaskID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskID","args":["org.apache.hadoop.mapreduce.TaskID"],"exceptions":[]},"org.apache.hadoop.mapred.TaskID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String getTaskIDsPattern(java.lang.String, java.lang.Integer, org.apache.hadoop.mapreduce.TaskType, 
java.lang.Integer)":{"name":"getTaskIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","org.apache.hadoop.mapreduce.TaskType","java.lang.Integer"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.JobID":{"name":"org.apache.hadoop.mapreduce.JobID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getJtIdentifier()":{"name":"getJtIdentifier","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.StringBuilder appendTo(java.lang.StringBuilder)":{"name":"appendTo","returnType":"java.lang.StringBuilder","args":["java.lang.StringBuilder"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.OutputLogFilter":{"name":"org.apache.hadoop.mapred.OutputLogFilter","methods":{"boolean 
accept(org.apache.hadoop.fs.Path)":{"name":"accept","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.RegexMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context)":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":[]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapred.join.StreamBackedIterator","methods":{}},"org.apache.hadoop.mapred.lib.FilterOutputFormat":{"name":"org.apache.hadoop.mapred.lib.FilterOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueHistogram","methods":{"java.lang.String getReportDetails()":{"name":"getReportDetails","returnType":"java.lang.String","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"java.util.TreeMap 
getReportItems()":{"name":"getReportItems","returnType":"java.util.TreeMap","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.DoubleValueSum","methods":{}},"org.apache.hadoop.mapred.lib.NLineInputFormat":{"name":"org.apache.hadoop.mapred.lib.NLineInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobConf":{"name":"org.apache.hadoop.mapred.JobConf","methods":{"void setInputFormat(java.lang.Class)":{"name":"setInputFormat","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxMapTaskFailuresPercent()":{"name":"getMaxMapTaskFailuresPercent","returnType":"int","args":[],"exceptions":[]},"void setMemoryForMapTask(long)":{"name":"setMemoryForMapTask","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.io.RawComparator getOutputValueGroupingComparator()":{"name":"getOutputValueGroupingComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void 
setOutputKeyClass(java.lang.Class)":{"name":"setOutputKeyClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setProfileParams(java.lang.String)":{"name":"setProfileParams","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxMapAttempts()":{"name":"getMaxMapAttempts","returnType":"int","args":[],"exceptions":[]},"void setNumMapTasks(int)":{"name":"setNumMapTasks","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getJobName()":{"name":"getJobName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getMapOutputCompressorClass(java.lang.Class)":{"name":"getMapOutputCompressorClass","returnType":"java.lang.Class","args":["java.lang.Class"],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaxTaskFailuresPerTracker(int)":{"name":"setMaxTaskFailuresPerTracker","returnType":"void","args":["int"],"exceptions":[]},"void setCombinerClass(java.lang.Class)":{"name":"setCombinerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"java.lang.Class getMapOutputValueClass()":{"name":"getMapOutputValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setKeepTaskFilesPattern(java.lang.String)":{"name":"setKeepTaskFilesPattern","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.Class getMapOutputKeyClass()":{"name":"getMapOutputKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getProfileTaskRange(boolean)":{"name":"getProfileTaskRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["boolean"],"exceptions":[]},"boolean getUseNewReducer()":{"name":"getUseNewReducer","returnType":"boolean","args":[],"exceptions":[]},"void setUseNewMapper(boolean)":{"name":"setUseNewMapper","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String 
getKeyFieldPartitionerOption()":{"name":"getKeyFieldPartitionerOption","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getProfileEnabled()":{"name":"getProfileEnabled","returnType":"boolean","args":[],"exceptions":[]},"void setOutputValueClass(java.lang.Class)":{"name":"setOutputValueClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setReducerClass(java.lang.Class)":{"name":"setReducerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setKeyFieldPartitionerOptions(java.lang.String)":{"name":"setKeyFieldPartitionerOptions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMemoryForReduceTask(long)":{"name":"setMemoryForReduceTask","returnType":"void","args":["long"],"exceptions":[]},"void setProfileEnabled(boolean)":{"name":"setProfileEnabled","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.io.RawComparator getCombinerKeyGroupingComparator()":{"name":"getCombinerKeyGroupingComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void setCredentials(org.apache.hadoop.security.Credentials)":{"name":"setCredentials","returnType":"void","args":["org.apache.hadoop.security.Credentials"],"exceptions":[]},"void setOutputFormat(java.lang.Class)":{"name":"setOutputFormat","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"int getNumTasksToExecutePerJvm()":{"name":"getNumTasksToExecutePerJvm","returnType":"int","args":[],"exceptions":[]},"java.lang.String findContainingJar(java.lang.Class)":{"name":"findContainingJar","returnType":"java.lang.String","args":["java.lang.Class"],"exceptions":[]},"void setMapOutputCompressorClass(java.lang.Class)":{"name":"setMapOutputCompressorClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"boolean getCompressMapOutput()":{"name":"getCompressMapOutput","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.RawComparator 
getOutputKeyComparator()":{"name":"getOutputKeyComparator","returnType":"org.apache.hadoop.io.RawComparator","args":[],"exceptions":[]},"void setJar(java.lang.String)":{"name":"setJar","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean getKeepFailedTaskFiles()":{"name":"getKeepFailedTaskFiles","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption()":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSpeculativeExecution(boolean)":{"name":"setSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Class getPartitionerClass()":{"name":"getPartitionerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.String getMapDebugScript()":{"name":"getMapDebugScript","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getCombinerClass()":{"name":"getCombinerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setMaxReduceAttempts(int)":{"name":"setMaxReduceAttempts","returnType":"void","args":["int"],"exceptions":[]},"void setMapOutputKeyClass(java.lang.Class)":{"name":"setMapOutputKeyClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"int getMaxReduceAttempts()":{"name":"getMaxReduceAttempts","returnType":"int","args":[],"exceptions":[]},"[Ljava.lang.String; getLocalDirs() throws java.io.IOException":{"name":"getLocalDirs","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputFormat getInputFormat()":{"name":"getInputFormat","returnType":"org.apache.hadoop.mapred.InputFormat","args":[],"exceptions":[]},"void setReduceDebugScript(java.lang.String)":{"name":"setReduceDebugScript","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMapperClass(java.lang.Class)":{"name":"setMapperClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void 
setJobName(java.lang.String)":{"name":"setJobName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOutputCommitter(java.lang.Class)":{"name":"setOutputCommitter","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMapRunnerClass(java.lang.Class)":{"name":"setMapRunnerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setReduceSpeculativeExecution(boolean)":{"name":"setReduceSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getJar()":{"name":"getJar","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.security.Credentials getCredentials()":{"name":"getCredentials","returnType":"org.apache.hadoop.security.Credentials","args":[],"exceptions":[]},"void setJobEndNotificationURI(java.lang.String)":{"name":"setJobEndNotificationURI","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJarByClass(java.lang.Class)":{"name":"setJarByClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"long normalizeMemoryConfigValue(long)":{"name":"normalizeMemoryConfigValue","returnType":"long","args":["long"],"exceptions":[]},"void setKeepFailedTaskFiles(boolean)":{"name":"setKeepFailedTaskFiles","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getReduceDebugScript()":{"name":"getReduceDebugScript","returnType":"java.lang.String","args":[],"exceptions":[]},"void setJobPriority(org.apache.hadoop.mapred.JobPriority)":{"name":"setJobPriority","returnType":"void","args":["org.apache.hadoop.mapred.JobPriority"],"exceptions":[]},"java.lang.Class getMapperClass()":{"name":"getMapperClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setUseNewReducer(boolean)":{"name":"setUseNewReducer","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Class getMapRunnerClass()":{"name":"getMapRunnerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int getMaxTaskFailuresPerTracker()":{"name":"getMaxTaskFailuresPerTracker","returnType":"int","args":[],"exceptions":[]},"java.lang.String getProfileParams()":{"name":"getProfileParams","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority getJobPriority()":{"name":"getJobPriority","returnType":"org.apache.hadoop.mapred.JobPriority","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.regex.Pattern getJarUnpackPattern()":{"name":"getJarUnpackPattern","returnType":"java.util.regex.Pattern","args":[],"exceptions":[]},"void setMapSpeculativeExecution(boolean)":{"name":"setMapSpeculativeExecution","returnType":"void","args":["boolean"],"exceptions":[]},"void setMaxMapAttempts(int)":{"name":"setMaxMapAttempts","returnType":"void","args":["int"],"exceptions":[]},"long getMaxVirtualMemoryForTask()":{"name":"getMaxVirtualMemoryForTask","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.mapred.OutputFormat getOutputFormat()":{"name":"getOutputFormat","returnType":"org.apache.hadoop.mapred.OutputFormat","args":[],"exceptions":[]},"long getMemoryForMapTask()":{"name":"getMemoryForMapTask","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSessionId()":{"name":"getSessionId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getMaxPhysicalMemoryForTask()":{"name":"getMaxPhysicalMemoryForTask","returnType":"long","args":[],"exceptions":[]},"boolean getUseNewMapper()":{"name":"getUseNewMapper","returnType":"boolean","args":[],"exceptions":[]},"void 
setMaxMapTaskFailuresPercent(int)":{"name":"setMaxMapTaskFailuresPercent","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getKeepTaskFilesPattern()":{"name":"getKeepTaskFilesPattern","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPartitionerClass(java.lang.Class)":{"name":"setPartitionerClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setNumReduceTasks(int)":{"name":"setNumReduceTasks","returnType":"void","args":["int"],"exceptions":[]},"java.lang.Class getReducerClass()":{"name":"getReducerClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"int getNumReduceTasks()":{"name":"getNumReduceTasks","returnType":"int","args":[],"exceptions":[]},"void setCombinerKeyGroupingComparator(java.lang.Class)":{"name":"setCombinerKeyGroupingComparator","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMaxReduceTaskFailuresPercent(int)":{"name":"setMaxReduceTaskFailuresPercent","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getJobLocalDir()":{"name":"getJobLocalDir","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String) throws java.io.IOException":{"name":"getLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int getNumMapTasks()":{"name":"getNumMapTasks","returnType":"int","args":[],"exceptions":[]},"void setOutputValueGroupingComparator(java.lang.Class)":{"name":"setOutputValueGroupingComparator","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"boolean 
getReduceSpeculativeExecution()":{"name":"getReduceSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"boolean getMapSpeculativeExecution()":{"name":"getMapSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"void setCompressMapOutput(boolean)":{"name":"setCompressMapOutput","returnType":"void","args":["boolean"],"exceptions":[]},"void setMaxPhysicalMemoryForTask(long)":{"name":"setMaxPhysicalMemoryForTask","returnType":"void","args":["long"],"exceptions":[]},"java.lang.Class getOutputKeyClass()":{"name":"getOutputKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setMapOutputValueClass(java.lang.Class)":{"name":"setMapOutputValueClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void deleteLocalFiles() throws java.io.IOException":{"name":"deleteLocalFiles","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobEndNotificationURI()":{"name":"getJobEndNotificationURI","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProfileTaskRange(boolean, java.lang.String)":{"name":"setProfileTaskRange","returnType":"void","args":["boolean","java.lang.String"],"exceptions":[]},"void setOutputKeyComparatorClass(java.lang.Class)":{"name":"setOutputKeyComparatorClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"void setMapDebugScript(java.lang.String)":{"name":"setMapDebugScript","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setKeyFieldComparatorOptions(java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getMaxReduceTaskFailuresPercent()":{"name":"getMaxReduceTaskFailuresPercent","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.OutputCommitter getOutputCommitter()":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapred.OutputCommitter","args":[],"exceptions":[]},"boolean 
getSpeculativeExecution()":{"name":"getSpeculativeExecution","returnType":"boolean","args":[],"exceptions":[]},"long getMemoryForReduceTask()":{"name":"getMemoryForReduceTask","returnType":"long","args":[],"exceptions":[]},"void deleteLocalFiles(java.lang.String) throws java.io.IOException":{"name":"deleteLocalFiles","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setNumTasksToExecutePerJvm(int)":{"name":"setNumTasksToExecutePerJvm","returnType":"void","args":["int"],"exceptions":[]},"void setSessionId(java.lang.String)":{"name":"setSessionId","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setMaxVirtualMemoryForTask(long)":{"name":"setMaxVirtualMemoryForTask","returnType":"void","args":["long"],"exceptions":[]},"java.lang.Class getOutputValueClass()":{"name":"getOutputValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.NLineInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.NLineInputFormat","methods":{"void setNumLinesPerSplit(org.apache.hadoop.mapreduce.Job, int)":{"name":"setNumLinesPerSplit","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","int"],"exceptions":[]},"int getNumLinesPerSplit(org.apache.hadoop.mapreduce.JobContext)":{"name":"getNumLinesPerSplit","returnType":"int","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"java.util.List getSplitsForFile(org.apache.hadoop.fs.FileStatus, org.apache.hadoop.conf.Configuration, int) throws java.io.IOException":{"name":"getSplitsForFile","returnType":"java.util.List","args":["org.apache.hadoop.fs.FileStatus","org.apache.hadoop.conf.Configuration","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TokenCountMapper":{"name":"org.apache.hadoop.mapred.lib.TokenCountMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.MapRunnable":{"name":"org.apache.hadoop.mapred.MapRunnable","methods":{"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobClient":{"name":"org.apache.hadoop.mapred.JobClient","methods":{"[Lorg.apache.hadoop.mapred.TaskReport; getMapTaskReports(java.lang.String) throws 
java.io.IOException":{"name":"getMapTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob getJob(java.lang.String) throws java.io.IOException":{"name":"getJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getStagingAreaDir() throws java.io.IOException":{"name":"getStagingAreaDir","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobQueueInfo getQueueInfo(java.lang.String) throws java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.mapred.JobQueueInfo","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getSetupTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getSetupTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void displayTasks(org.apache.hadoop.mapred.JobID, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"displayTasks","returnType":"void","args":["org.apache.hadoop.mapred.JobID","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob submitJobInternal(org.apache.hadoop.mapred.JobConf) throws java.io.IOException, java.io.FileNotFoundException":{"name":"submitJobInternal","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long renewDelegationToken(org.apache.hadoop.security.token.Token) throws java.lang.InterruptedException, org.apache.hadoop.security.token.SecretManager$InvalidToken, 
java.io.IOException":{"name":"renewDelegationToken","returnType":"long","args":["org.apache.hadoop.security.token.Token"],"exceptions":["java.lang.InterruptedException","org.apache.hadoop.security.token.SecretManager$InvalidToken","java.io.IOException"]},"org.apache.hadoop.mapred.ClusterStatus getClusterStatus() throws java.io.IOException":{"name":"getClusterStatus","returnType":"org.apache.hadoop.mapred.ClusterStatus","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getReduceTaskReports(java.lang.String) throws java.io.IOException":{"name":"getReduceTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getQueues() throws java.io.IOException":{"name":"getQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobStatus; getJobsFromQueue(java.lang.String) throws java.io.IOException":{"name":"getJobsFromQueue","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Cluster getClusterHandle()":{"name":"getClusterHandle","returnType":"org.apache.hadoop.mapreduce.Cluster","args":[],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException, java.io.FileNotFoundException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"[Lorg.apache.hadoop.mapred.QueueAclsInfo; getQueueAclsForCurrentUser() throws java.io.IOException":{"name":"getQueueAclsForCurrentUser","returnType":"[Lorg.apache.hadoop.mapred.QueueAclsInfo;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; 
getReduceTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getReduceTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.security.token.Token getDelegationToken(org.apache.hadoop.io.Text) throws java.lang.InterruptedException, java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.fs.Path getSystemDir()":{"name":"getSystemDir","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean monitorAndPrintJob(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.RunningJob) throws java.lang.InterruptedException, java.io.IOException":{"name":"monitorAndPrintJob","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.RunningJob"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getRootQueues() throws java.io.IOException":{"name":"getRootQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob submitJob(java.lang.String) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, java.io.FileNotFoundException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["java.lang.String"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","java.io.FileNotFoundException"]},"[Lorg.apache.hadoop.mapred.JobStatus; getAllJobs() throws 
java.io.IOException":{"name":"getAllJobs","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob getJob(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void init(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"init","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setTaskOutputFilter(org.apache.hadoop.mapred.JobClient$TaskStatusFilter)":{"name":"setTaskOutputFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobClient$TaskStatusFilter"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.JobStatus; jobsToComplete() throws java.io.IOException":{"name":"jobsToComplete","returnType":"[Lorg.apache.hadoop.mapred.JobStatus;","args":[],"exceptions":["java.io.IOException"]},"boolean isJobDirValid(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.FileSystem) throws java.io.IOException":{"name":"isJobDirValid","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.FileSystem"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobClient$TaskStatusFilter getTaskOutputFilter()":{"name":"getTaskOutputFilter","returnType":"org.apache.hadoop.mapred.JobClient$TaskStatusFilter","args":[],"exceptions":[]},"[Lorg.apache.hadoop.mapred.JobQueueInfo; getChildQueues(java.lang.String) throws java.io.IOException":{"name":"getChildQueues","returnType":"[Lorg.apache.hadoop.mapred.JobQueueInfo;","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.ClusterStatus getClusterStatus(boolean) throws java.io.IOException":{"name":"getClusterStatus","returnType":"org.apache.hadoop.mapred.ClusterStatus","args":["boolean"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getFs() 
throws java.io.IOException":{"name":"getFs","returnType":"org.apache.hadoop.fs.FileSystem","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobClient$TaskStatusFilter getTaskOutputFilter(org.apache.hadoop.mapred.JobConf)":{"name":"getTaskOutputFilter","returnType":"org.apache.hadoop.mapred.JobClient$TaskStatusFilter","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.TaskReport; getCleanupTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getCleanupTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void setTaskOutputFilter(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.JobClient$TaskStatusFilter)":{"name":"setTaskOutputFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.JobClient$TaskStatusFilter"],"exceptions":[]},"int getDefaultReduces() throws java.io.IOException":{"name":"getDefaultReduces","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"int getDefaultMaps() throws java.io.IOException":{"name":"getDefaultMaps","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskReport; getMapTaskReports(org.apache.hadoop.mapred.JobID) throws java.io.IOException":{"name":"getMapTaskReports","returnType":"[Lorg.apache.hadoop.mapred.TaskReport;","args":["org.apache.hadoop.mapred.JobID"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void cancelDelegationToken(org.apache.hadoop.security.token.Token) throws 
java.lang.InterruptedException, org.apache.hadoop.security.token.SecretManager$InvalidToken, java.io.IOException":{"name":"cancelDelegationToken","returnType":"void","args":["org.apache.hadoop.security.token.Token"],"exceptions":["java.lang.InterruptedException","org.apache.hadoop.security.token.SecretManager$InvalidToken","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.RecordWriter":{"name":"org.apache.hadoop.mapreduce.RecordWriter","methods":{"void close(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"close","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.FieldSelectionMapReduce":{"name":"org.apache.hadoop.mapred.lib.FieldSelectionMapReduce","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapred.KeyValueLineRecordReader","methods":{"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws 
java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount","methods":{"java.util.Set getUniqueItems()":{"name":"getUniqueItems","returnType":"java.util.Set","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long setMaxItems(long)":{"name":"setMaxItems","returnType":"long","args":["long"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper","methods":{"java.lang.String specToString(java.lang.String, java.lang.String, int, java.util.List, java.util.List)":{"name":"specToString","returnType":"java.lang.String","args":["java.lang.String","java.lang.String","int","java.util.List","java.util.List"],"exceptions":[]},"org.apache.hadoop.io.Text getKey()":{"name":"getKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getValue()":{"name":"getValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"int parseOutputKeyValueSpec(java.lang.String, java.util.List, java.util.List)":{"name":"parseOutputKeyValueSpec","returnType":"int","args":["java.lang.String","java.util.List","java.util.List"],"exceptions":[]},"void extractOutputKeyValue(java.lang.String, 
java.lang.String, java.lang.String, java.util.List, java.util.List, int, boolean, boolean)":{"name":"extractOutputKeyValue","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String","java.util.List","java.util.List","int","boolean","boolean"],"exceptions":[]}}},"org.apache.hadoop.mapred.MapFileOutputFormat":{"name":"org.apache.hadoop.mapred.MapFileOutputFormat","methods":{"[Lorg.apache.hadoop.io.MapFile$Reader; getReaders(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.MapFile$Reader;","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable getEntry([Lorg.apache.hadoop.io.MapFile$Reader;, org.apache.hadoop.mapred.Partitioner, org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"getEntry","returnType":"org.apache.hadoop.io.Writable","args":["[Lorg.apache.hadoop.io.MapFile$Reader;","org.apache.hadoop.mapred.Partitioner","org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.Utils":{"name":"org.apache.hadoop.mapred.Utils","methods":{}},"org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter":{"name":"org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter","methods":{"org.apache.hadoop.fs.Path 
getWorkPath() throws java.io.IOException":{"name":"getWorkPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getJobAttemptPath(org.apache.hadoop.mapreduce.JobContext)":{"name":"getJobAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.fs.Path getCommittedTaskPath(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path)":{"name":"getCommittedTaskPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getJobAttemptPath(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.fs.Path)":{"name":"getJobAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path)":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":[]},"void 
recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.fs.Path getCommittedTaskPath(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getCommittedTaskPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMax":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMax","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void 
addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileSplit":{"name":"org.apache.hadoop.mapred.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptions":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent","methods":{"void setTaskID(org.apache.hadoop.mapred.TaskAttemptID)":{"name":"setTaskID","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":[]},"void setTaskRunTime(int)":{"name":"setTaskRunTime","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskAttemptID getTaskAttemptId()":{"name":"getTaskAttemptId","returnType":"org.apache.hadoop.mapreduce.TaskAttemptID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID getTaskAttemptId()":{"name":"getTaskAttemptId","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"void setTaskId(java.lang.String)":{"name":"setTaskId","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setTaskTrackerHttp(java.lang.String)":{"name":"setTaskTrackerHttp","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setEventId(int)":{"name":"setEventId","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent downgrade(org.apache.hadoop.mapreduce.TaskCompletionEvent)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent","args":["org.apache.hadoop.mapreduce.TaskCompletionEvent"],"exceptions":[]},"java.lang.String getTaskId()":{"name":"getTaskId","returnType":"java.lang.String","args":[],"exceptions":[]},"void setTaskStatus(org.apache.hadoop.mapred.TaskCompletionEvent$Status)":{"name":"setTaskStatus","returnType":"void","args":["org.apache.hadoop.mapred.TaskCompletionEvent$Status"],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status 
getTaskStatus()":{"name":"getTaskStatus","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.IdentityMapper":{"name":"org.apache.hadoop.mapred.lib.IdentityMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsBinaryInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Counters":{"name":"org.apache.hadoop.mapreduce.Counters","methods":{}},"org.apache.hadoop.mapred.join.WrappedRecordReader":{"name":"org.apache.hadoop.mapred.join.WrappedRecordReader","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"void 
accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.mapred.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void 
setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskAttemptID":{"name":"org.apache.hadoop.mapreduce.TaskAttemptID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapreduce.TaskID","args":[],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskAttemptID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskAttemptID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"int 
compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LazyOutputFormat":{"name":"org.apache.hadoop.mapred.lib.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, [Ljava.lang.String;) throws 
java.io.IOException":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, int) throws java.io.IOException":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.String constructQuery(java.lang.String, [Ljava.lang.String;)":{"name":"constructQuery","returnType":"java.lang.String","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.InvalidJobConfException":{"name":"org.apache.hadoop.mapred.InvalidJobConfException","methods":{}},"org.apache.hadoop.mapred.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapred.SequenceFileRecordReader","methods":{"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"long getPos() 
throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.Class getValueClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.InnerJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.InnerJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.lib.MultipleOutputs":{"name":"org.apache.hadoop.mapred.lib.MultipleOutputs","methods":{"boolean getCountersEnabled(org.apache.hadoop.mapred.JobConf)":{"name":"getCountersEnabled","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.OutputCollector getCollector(java.lang.String, java.lang.String, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getCollector","returnType":"org.apache.hadoop.mapred.OutputCollector","args":["java.lang.String","java.lang.String","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.Class getNamedOutputKeyClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setCountersEnabled(org.apache.hadoop.mapred.JobConf, 
boolean)":{"name":"setCountersEnabled","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"boolean isMultiNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"isMultiNamedOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"java.lang.Class getNamedOutputFormatClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputFormatClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"java.util.Iterator getNamedOutputs()":{"name":"getNamedOutputs","returnType":"java.util.Iterator","args":[],"exceptions":[]},"java.util.List getNamedOutputsList(org.apache.hadoop.mapred.JobConf)":{"name":"getNamedOutputsList","returnType":"java.util.List","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.OutputCollector getCollector(java.lang.String, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getCollector","returnType":"org.apache.hadoop.mapred.OutputCollector","args":["java.lang.String","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMultiNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addMultiNamedOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void addNamedOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addNamedOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.Class 
getNamedOutputValueClass(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getNamedOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.conf.Configuration)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setPartitionFile(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path"],"exceptions":[]},"int getPartition(org.apache.hadoop.io.WritableComparable, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["org.apache.hadoop.io.WritableComparable","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.MultiFileInputFormat":{"name":"org.apache.hadoop.mapred.MultiFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat","methods":{"org.apache.hadoop.io.SequenceFile$CompressionType getOutputCompressionType(org.apache.hadoop.mapreduce.JobContext)":{"name":"getOutputCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setOutputCompressionType(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setOutputCompressionType","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters":{"name":"org.apache.hadoop.mapred.Counters","methods":{"void log(org.apache.commons.logging.Log)":{"name":"log","returnType":"void","args":["org.apache.commons.logging.Log"],"exceptions":[]},"void 
incrAllCounters(org.apache.hadoop.mapred.Counters)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapred.Counters"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Group getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapred.Counters$Group","args":["java.lang.String"],"exceptions":[]},"void incrCounter(java.lang.String, java.lang.String, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"void incrCounter(java.lang.Enum, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.Enum","long"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters sum(org.apache.hadoop.mapred.Counters, org.apache.hadoop.mapred.Counters)":{"name":"sum","returnType":"org.apache.hadoop.mapred.Counters","args":["org.apache.hadoop.mapred.Counters","org.apache.hadoop.mapred.Counters"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, int, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase 
getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String"],"exceptions":[]},"java.lang.String makeCompactString()":{"name":"makeCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters fromEscapedCompactString(java.lang.String) throws java.text.ParseException":{"name":"fromEscapedCompactString","returnType":"org.apache.hadoop.mapred.Counters","args":["java.lang.String"],"exceptions":["java.text.ParseException"]},"long getCounter(java.lang.Enum)":{"name":"getCounter","returnType":"long","args":["java.lang.Enum"],"exceptions":[]},"java.util.Collection getGroupNames()":{"name":"getGroupNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.lang.Iterable getGroupNames()":{"name":"getGroupNames","returnType":"java.lang.Iterable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.ResetableIterator":{"name":"org.apache.hadoop.mapred.join.ResetableIterator","methods":{}},"org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapred.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.ArrayListBackedIterator":{"name":"org.apache.hadoop.mapred.join.ArrayListBackedIterator","methods":{}},"org.apache.hadoop.mapred.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapred.lib.db.DBConfiguration","methods":{"void configureDB(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.mapred.JobConf, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapred.lib.KeyFieldBasedComparator","methods":{"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReaderWrapper","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskReport":{"name":"org.apache.hadoop.mapred.TaskReport","methods":{"org.apache.hadoop.mapred.TaskAttemptID getSuccessfulTaskAttempt()":{"name":"getSuccessfulTaskAttempt","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":[],"exceptions":[]},"java.util.Collection 
getRunningTaskAttempts()":{"name":"getRunningTaskAttempts","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters getCounters()":{"name":"getCounters","returnType":"org.apache.hadoop.mapred.Counters","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapred.TaskID","args":[],"exceptions":[]},"void setSuccessfulAttempt(org.apache.hadoop.mapred.TaskAttemptID)":{"name":"setSuccessfulAttempt","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":[]},"void setRunningTaskAttempts(java.util.Collection)":{"name":"setRunningTaskAttempts","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"java.lang.String getTaskId()":{"name":"getTaskId","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.TextOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.TextOutputFormat","methods":{"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase","methods":{"void setup(org.apache.hadoop.conf.Configuration)":{"name":"setup","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.NullOutputFormat":{"name":"org.apache.hadoop.mapred.lib.NullOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf)":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.InvalidFileTypeException":{"name":"org.apache.hadoop.mapred.InvalidFileTypeException","methods":{}},"org.apache.hadoop.mapreduce.tools.CLI":{"name":"org.apache.hadoop.mapreduce.tools.CLI","methods":{"void displayJobList([Lorg.apache.hadoop.mapreduce.JobStatus;, java.io.PrintWriter)":{"name":"displayJobList","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;","java.io.PrintWriter"],"exceptions":[]},"void displayJobList([Lorg.apache.hadoop.mapreduce.JobStatus;) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"displayJobList","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]}}},"org.apache.hadoop.mapred.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setInput(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setInput(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.String","java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws 
java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.Reporter":{"name":"org.apache.hadoop.mapred.Reporter","methods":{"void incrCounter(java.lang.Enum, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.Enum","long"],"exceptions":[]},"void incrCounter(java.lang.String, java.lang.String, long)":{"name":"incrCounter","returnType":"void","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounter(java.lang.String, java.lang.String)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter 
getCounter(java.lang.Enum)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.Enum"],"exceptions":[]},"org.apache.hadoop.mapred.InputSplit getInputSplit() throws java.lang.UnsupportedOperationException":{"name":"getInputSplit","returnType":"org.apache.hadoop.mapred.InputSplit","args":[],"exceptions":["java.lang.UnsupportedOperationException"]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setStatus(java.lang.String)":{"name":"setStatus","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputFormat":{"name":"org.apache.hadoop.mapreduce.InputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.InputSampler":{"name":"org.apache.hadoop.mapred.lib.InputSampler","methods":{"void writePartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.lib.InputSampler$Sampler) throws java.lang.InterruptedException, java.io.IOException, 
java.lang.ClassNotFoundException":{"name":"writePartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.lib.InputSampler$Sampler"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void 
configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMax","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ArrayListBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ArrayListBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void 
clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobPriority":{"name":"org.apache.hadoop.mapred.JobPriority","methods":{"[Lorg.apache.hadoop.mapred.JobPriority; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.JobPriority;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobPriority valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.JobPriority","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FileOutputFormat","methods":{"boolean getCompressOutput(org.apache.hadoop.mapreduce.JobContext)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapreduce.TaskInputOutputContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskInputOutputContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setOutputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path"],"exceptions":[]},"void setOutputCompressorClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.fs.Path 
getPathForWorkFile(org.apache.hadoop.mapreduce.TaskInputOutputContext, java.lang.String, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"getPathForWorkFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskInputOutputContext","java.lang.String","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.String getUniqueFile(org.apache.hadoop.mapreduce.TaskAttemptContext, java.lang.String, java.lang.String)":{"name":"getUniqueFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapreduce.JobContext)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapreduce.JobContext, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.fs.Path getDefaultWorkFile(org.apache.hadoop.mapreduce.TaskAttemptContext, java.lang.String) throws 
java.io.IOException":{"name":"getDefaultWorkFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapreduce.TaskAttemptContext","java.lang.String"],"exceptions":["java.io.IOException"]},"void setCompressOutput(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorReducer":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws 
java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapred.join.CompositeRecordReader","methods":{"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.mapred.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":[]},"void add(org.apache.hadoop.mapred.join.ComposableRecordReader) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.join.ComposableRecordReader"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainReducer":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainReducer","methods":{"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setReducer(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"setReducer","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void run(org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.ID":{"name":"org.apache.hadoop.mapreduce.ID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsTextRecordReader":{"name":"org.apache.hadoop.mapred.SequenceFileAsTextRecordReader","methods":{"boolean next(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text"],"exceptions":["java.io.IOException"]},"long 
getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"org.apache.hadoop.io.Text createKey()":{"name":"createKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.partition.HashPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.ChainReducer":{"name":"org.apache.hadoop.mapred.lib.ChainReducer","methods":{"void setReducer(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.mapred.JobConf)":{"name":"setReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void reduce(java.lang.Object, java.util.Iterator, 
org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMapper(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, org.apache.hadoop.mapred.JobConf)":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleInputs":{"name":"org.apache.hadoop.mapred.lib.MultipleInputs","methods":{"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path","java.lang.Class"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.mapred.SequenceFileOutputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileOutputFormat","methods":{"[Lorg.apache.hadoop.io.SequenceFile$Reader; getReaders(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path) throws 
java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.SequenceFile$Reader;","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setOutputCompressionType(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setOutputCompressionType","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$CompressionType getOutputCompressionType(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setMapperClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setMapperClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"int getNumberOfThreads(org.apache.hadoop.mapreduce.JobContext)":{"name":"getNumberOfThreads","returnType":"int","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"java.lang.Class getMapperClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMapperClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setNumberOfThreads(org.apache.hadoop.mapreduce.Job, int)":{"name":"setNumberOfThreads","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.Mapper":{"name":"org.apache.hadoop.mapred.Mapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskType":{"name":"org.apache.hadoop.mapreduce.TaskType","methods":{"[Lorg.apache.hadoop.mapreduce.TaskType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapreduce.TaskType;","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.TextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.TextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader 
createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.OutputCommitter":{"name":"org.apache.hadoop.mapred.OutputCommitter","methods":{"void cleanupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void 
abortJob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapred.JobContext, int) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext","int"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws 
java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.InputFormat":{"name":"org.apache.hadoop.mapred.InputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SkipBadRecords":{"name":"org.apache.hadoop.mapred.SkipBadRecords","methods":{"void setMapperMaxSkipRecords(org.apache.hadoop.conf.Configuration, long)":{"name":"setMapperMaxSkipRecords","returnType":"void","args":["org.apache.hadoop.conf.Configuration","long"],"exceptions":[]},"int getAttemptsToStartSkipping(org.apache.hadoop.conf.Configuration)":{"name":"getAttemptsToStartSkipping","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"long getReducerMaxSkipGroups(org.apache.hadoop.conf.Configuration)":{"name":"getReducerMaxSkipGroups","returnType":"long","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"long getMapperMaxSkipRecords(org.apache.hadoop.conf.Configuration)":{"name":"getMapperMaxSkipRecords","returnType":"long","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setSkipOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setSkipOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"void setReducerMaxSkipGroups(org.apache.hadoop.conf.Configuration, long)":{"name":"setReducerMaxSkipGroups","returnType":"void","args":["org.apache.hadoop.conf.Configuration","long"],"exceptions":[]},"void setAutoIncrMapperProcCount(org.apache.hadoop.conf.Configuration, 
boolean)":{"name":"setAutoIncrMapperProcCount","returnType":"void","args":["org.apache.hadoop.conf.Configuration","boolean"],"exceptions":[]},"boolean getAutoIncrReducerProcCount(org.apache.hadoop.conf.Configuration)":{"name":"getAutoIncrReducerProcCount","returnType":"boolean","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"boolean getAutoIncrMapperProcCount(org.apache.hadoop.conf.Configuration)":{"name":"getAutoIncrMapperProcCount","returnType":"boolean","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.Path getSkipOutputPath(org.apache.hadoop.conf.Configuration)":{"name":"getSkipOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"void setAutoIncrReducerProcCount(org.apache.hadoop.conf.Configuration, boolean)":{"name":"setAutoIncrReducerProcCount","returnType":"void","args":["org.apache.hadoop.conf.Configuration","boolean"],"exceptions":[]},"void setAttemptsToStartSkipping(org.apache.hadoop.conf.Configuration, int)":{"name":"setAttemptsToStartSkipping","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]}}},"org.apache.hadoop.mapred.TaskAttemptID":{"name":"org.apache.hadoop.mapred.TaskAttemptID","methods":{"org.apache.hadoop.mapred.TaskAttemptID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String getTaskAttemptIDsPattern(java.lang.String, java.lang.Integer, org.apache.hadoop.mapreduce.TaskType, java.lang.Integer, java.lang.Integer)":{"name":"getTaskAttemptIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","org.apache.hadoop.mapreduce.TaskType","java.lang.Integer","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID forName(java.lang.String) throws 
java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapreduce.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapreduce.TaskID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskID getTaskID()":{"name":"getTaskID","returnType":"org.apache.hadoop.mapred.TaskID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskAttemptID downgrade(org.apache.hadoop.mapreduce.TaskAttemptID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.TaskAttemptID","args":["org.apache.hadoop.mapreduce.TaskAttemptID"],"exceptions":[]},"java.lang.String getTaskAttemptIDsPattern(java.lang.String, java.lang.Integer, java.lang.Boolean, java.lang.Integer, java.lang.Integer)":{"name":"getTaskAttemptIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer","java.lang.Boolean","java.lang.Integer","java.lang.Integer"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorCombiner":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorCombiner","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.OutputFormat":{"name":"org.apache.hadoop.mapred.OutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RunningJob":{"name":"org.apache.hadoop.mapred.RunningJob","methods":{"void setJobPriority(java.lang.String) throws java.io.IOException":{"name":"setJobPriority","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean isComplete() throws java.io.IOException":{"name":"isComplete","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void killTask(java.lang.String, boolean) throws java.io.IOException":{"name":"killTask","returnType":"void","args":["java.lang.String","boolean"],"exceptions":["java.io.IOException"]},"float cleanupProgress() throws java.io.IOException":{"name":"cleanupProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"int getJobState() throws java.io.IOException":{"name":"getJobState","returnType":"int","args":[],"exceptions":["java.io.IOException"]},"boolean isSuccessful() throws 
java.io.IOException":{"name":"isSuccessful","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"float mapProgress() throws java.io.IOException":{"name":"mapProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"float setupProgress() throws java.io.IOException":{"name":"setupProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getFailureInfo() throws java.io.IOException":{"name":"getFailureInfo","returnType":"java.lang.String","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobName()":{"name":"getJobName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobID getID()":{"name":"getID","returnType":"org.apache.hadoop.mapred.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobStatus getJobStatus() throws java.io.IOException":{"name":"getJobStatus","returnType":"org.apache.hadoop.mapred.JobStatus","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getJobID()":{"name":"getJobID","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getJobFile()":{"name":"getJobFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void waitForCompletion() throws java.io.IOException":{"name":"waitForCompletion","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.TaskCompletionEvent; getTaskCompletionEvents(int) throws java.io.IOException":{"name":"getTaskCompletionEvents","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent;","args":["int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.Counters getCounters() throws java.io.IOException":{"name":"getCounters","returnType":"org.apache.hadoop.mapred.Counters","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getHistoryUrl() throws 
java.io.IOException":{"name":"getHistoryUrl","returnType":"java.lang.String","args":[],"exceptions":["java.io.IOException"]},"float reduceProgress() throws java.io.IOException":{"name":"reduceProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"void killTask(org.apache.hadoop.mapred.TaskAttemptID, boolean) throws java.io.IOException":{"name":"killTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptID","boolean"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConfiguration()":{"name":"getConfiguration","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"[Ljava.lang.String; getTaskDiagnostics(org.apache.hadoop.mapred.TaskAttemptID) throws java.io.IOException":{"name":"getTaskDiagnostics","returnType":"[Ljava.lang.String;","args":["org.apache.hadoop.mapred.TaskAttemptID"],"exceptions":["java.io.IOException"]},"boolean isRetired() throws java.io.IOException":{"name":"isRetired","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void killJob() throws java.io.IOException":{"name":"killJob","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"java.lang.String getTrackingURL()":{"name":"getTrackingURL","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.FileInputFormat":{"name":"org.apache.hadoop.mapred.FileInputFormat","methods":{"void setInputPaths(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws 
java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setInputPathFilter(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setInputPathFilter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void addInputPaths(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"addInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setInputPaths(org.apache.hadoop.mapred.JobConf, [Lorg.apache.hadoop.fs.Path;)":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.fs.PathFilter getInputPathFilter(org.apache.hadoop.mapred.JobConf)":{"name":"getInputPathFilter","returnType":"org.apache.hadoop.fs.PathFilter","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getInputPaths(org.apache.hadoop.mapred.JobConf)":{"name":"getInputPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextRecordReader","methods":{"org.apache.hadoop.io.Text 
getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws 
java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat","methods":{"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable getEntry([Lorg.apache.hadoop.io.MapFile$Reader;, org.apache.hadoop.mapreduce.Partitioner, org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"getEntry","returnType":"org.apache.hadoop.io.Writable","args":["[Lorg.apache.hadoop.io.MapFile$Reader;","org.apache.hadoop.mapreduce.Partitioner","org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.MapFile$Reader; getReaders(org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getReaders","returnType":"[Lorg.apache.hadoop.io.MapFile$Reader;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMax","methods":{}},"org.apache.hadoop.mapred.lib.CombineFileRecordReader":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object 
createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Mapper":{"name":"org.apache.hadoop.mapreduce.Mapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configuration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.InvalidInputException":{"name":"org.apache.hadoop.mapred.InvalidInputException","methods":{"java.util.List 
getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.MultipleInputs":{"name":"org.apache.hadoop.mapreduce.lib.input.MultipleInputs","methods":{"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path","java.lang.Class"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class)":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.mapred.OutputCollector":{"name":"org.apache.hadoop.mapred.OutputCollector","methods":{"void collect(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"collect","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.MultipleOutputs":{"name":"org.apache.hadoop.mapreduce.lib.output.MultipleOutputs","methods":{"void write(java.lang.Object, java.lang.Object, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setCountersEnabled(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setCountersEnabled","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]},"boolean getCountersEnabled(org.apache.hadoop.mapreduce.JobContext)":{"name":"getCountersEnabled","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void close() 
throws java.lang.InterruptedException, java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void write(java.lang.String, java.lang.Object, java.lang.Object, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.String","java.lang.Object","java.lang.Object","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addNamedOutput(org.apache.hadoop.mapreduce.Job, java.lang.String, java.lang.Class, java.lang.Class, java.lang.Class)":{"name":"addNamedOutput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String","java.lang.Class","java.lang.Class","java.lang.Class"],"exceptions":[]},"void write(java.lang.String, java.lang.Object, java.lang.Object) throws java.lang.InterruptedException, java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.String","java.lang.Object","java.lang.Object"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapred.join.OverrideRecordReader","methods":{}},"org.apache.hadoop.mapred.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapred.join.ComposableRecordReader","methods":{"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void accept(org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapred.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"void skip(org.apache.hadoop.io.WritableComparable) throws 
java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.TextInputFormat":{"name":"org.apache.hadoop.mapred.TextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.InverseMapper":{"name":"org.apache.hadoop.mapred.lib.InverseMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionMapper":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, 
java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.counters.AbstractCounters":{"name":"org.apache.hadoop.mapreduce.counters.AbstractCounters","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.Enum)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.Enum"],"exceptions":[]},"int countCounters()":{"name":"countCounters","returnType":"int","args":[],"exceptions":[]},"boolean getWriteAllCounters()":{"name":"getWriteAllCounters","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase addGroup(org.apache.hadoop.mapreduce.counters.CounterGroupBase)":{"name":"addGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["org.apache.hadoop.mapreduce.counters.CounterGroupBase"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws 
java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, org.apache.hadoop.mapreduce.FileSystemCounter)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","org.apache.hadoop.mapreduce.FileSystemCounter"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase addGroup(java.lang.String, java.lang.String)":{"name":"addGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase getGroup(java.lang.String)":{"name":"getGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.Limits limits()":{"name":"limits","returnType":"org.apache.hadoop.mapreduce.counters.Limits","args":[],"exceptions":[]},"void setWriteAllCounters(boolean)":{"name":"setWriteAllCounters","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.Iterable getGroupNames()":{"name":"getGroupNames","returnType":"java.lang.Iterable","args":[],"exceptions":[]},"void incrAllCounters(org.apache.hadoop.mapreduce.counters.AbstractCounters)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapreduce.counters.AbstractCounters"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapred.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, 
org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.Counters$Group":{"name":"org.apache.hadoop.mapred.Counters$Group","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounter(int, java.lang.String)":{"name":"getCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["int","java.lang.String"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, boolean)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","boolean"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String, java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.Counters$Counter addCounter(java.lang.String, java.lang.String, 
long)":{"name":"addCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter getCounterForName(java.lang.String)":{"name":"getCounterForName","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.counters.CounterGroupBase getUnderlyingGroup()":{"name":"getUnderlyingGroup","returnType":"org.apache.hadoop.mapreduce.counters.CounterGroupBase","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"void incrAllCounters(org.apache.hadoop.mapreduce.counters.CounterGroupBase)":{"name":"incrAllCounters","returnType":"void","args":["org.apache.hadoop.mapreduce.counters.CounterGroupBase"],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter addCounter(java.lang.String, java.lang.String, long)":{"name":"addCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String","java.lang.String","long"],"exceptions":[]},"long getCounter(java.lang.String)":{"name":"getCounter","returnType":"long","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.Counters$Counter findCounter(java.lang.String, boolean)":{"name":"findCounter","returnType":"org.apache.hadoop.mapred.Counters$Counter","args":["java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter findCounter(java.lang.String)":{"name":"findCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":["java.lang.String"],"exceptions":[]},"void 
addCounter(org.apache.hadoop.mapred.Counters$Counter)":{"name":"addCounter","returnType":"void","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"void addCounter(org.apache.hadoop.mapreduce.Counter)":{"name":"addCounter","returnType":"void","args":["org.apache.hadoop.mapreduce.Counter"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.map.InverseMapper":{"name":"org.apache.hadoop.mapreduce.lib.map.InverseMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.db.DBWritable":{"name":"org.apache.hadoop.mapreduce.lib.db.DBWritable","methods":{"void readFields(java.sql.ResultSet) throws java.sql.SQLException":{"name":"readFields","returnType":"void","args":["java.sql.ResultSet"],"exceptions":["java.sql.SQLException"]},"void write(java.sql.PreparedStatement) throws java.sql.SQLException":{"name":"write","returnType":"void","args":["java.sql.PreparedStatement"],"exceptions":["java.sql.SQLException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader","methods":{"java.lang.Object 
getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.FileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.FileInputFormat","methods":{"void setInputPaths(org.apache.hadoop.mapreduce.Job, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void setInputPathFilter(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setInputPathFilter","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"long 
getMinSplitSize(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMinSplitSize","returnType":"long","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void addInputPath(org.apache.hadoop.mapreduce.Job, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"addInputPath","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.Path; getInputPaths(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"boolean getInputDirRecursive(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputDirRecursive","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setInputDirRecursive(org.apache.hadoop.mapreduce.Job, boolean)":{"name":"setInputDirRecursive","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","boolean"],"exceptions":[]},"long getMaxSplitSize(org.apache.hadoop.mapreduce.JobContext)":{"name":"getMaxSplitSize","returnType":"long","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.fs.PathFilter getInputPathFilter(org.apache.hadoop.mapreduce.JobContext)":{"name":"getInputPathFilter","returnType":"org.apache.hadoop.fs.PathFilter","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setMinInputSplitSize(org.apache.hadoop.mapreduce.Job, long)":{"name":"setMinInputSplitSize","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","long"],"exceptions":[]},"void setMaxInputSplitSize(org.apache.hadoop.mapreduce.Job, long)":{"name":"setMaxInputSplitSize","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","long"],"exceptions":[]},"void setInputPaths(org.apache.hadoop.mapreduce.Job, java.lang.String) throws 
java.io.IOException":{"name":"setInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":["java.io.IOException"]},"void addInputPaths(org.apache.hadoop.mapreduce.Job, java.lang.String) throws java.io.IOException":{"name":"addInputPaths","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":["java.io.IOException"]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.MultiFilterRecordReader":{"name":"org.apache.hadoop.mapred.join.MultiFilterRecordReader","methods":{"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat","methods":{"java.lang.String compose(java.lang.String, java.lang.Class, 
[Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"void setFormat(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.FileOutputCommitter":{"name":"org.apache.hadoop.mapred.FileOutputCommitter","methods":{"void cleanupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapred.TaskAttemptContext) throws 
java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isCommitJobRepeatable(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkPath(org.apache.hadoop.mapred.TaskAttemptContext, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getWorkPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.TaskAttemptContext","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void abortJob(org.apache.hadoop.mapred.JobContext, int) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext","int"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getTaskAttemptPath(org.apache.hadoop.mapred.TaskAttemptContext) throws 
java.io.IOException":{"name":"getTaskAttemptPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapred.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapred.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapred.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapred.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMin","methods":{}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner","methods":{"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setKeyFieldPartitionerOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldPartitionerOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"java.lang.String 
getKeyFieldPartitionerOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldPartitionerOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apache.hadoop.mapred.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapred.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.conf.Configuration, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.InnerJoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.InnerJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.MapRunner":{"name":"org.apache.hadoop.mapred.MapRunner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.RecordReader":{"name":"org.apache.hadoop.mapreduce.RecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, 
java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.ChainMapper":{"name":"org.apache.hadoop.mapred.lib.ChainMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void addMapper(org.apache.hadoop.mapred.JobConf, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, boolean, 
org.apache.hadoop.mapred.JobConf)":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","boolean","org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.JobConfigurable":{"name":"org.apache.hadoop.mapred.JobConfigurable","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner":{"name":"org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.MultithreadedMapRunner":{"name":"org.apache.hadoop.mapred.lib.MultithreadedMapRunner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void run(org.apache.hadoop.mapred.RecordReader, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapred.RecordReader","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}}}} \ No newline at end of file diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list deleted file mode 100644 index 03167870..00000000 --- 
a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list +++ /dev/null @@ -1,22 +0,0 @@ -netty-3\.6\.2\.Final[\.\-_].*jar -leveldbjni-all-1\.8[\.\-_].*jar -paranamer-2\.3[\.\-_].*jar -jackson-core-asl-1\.9\.13[\.\-_].*jar -jersey-server-1\.9[\.\-_].*jar -guice-3\.0[\.\-_].*jar -avro-1\.7\.[4-7][\.\-_].*jar -log4j-1\.2\.17[\.\-_].*jar -jackson-mapper-asl-1\.9\.13[\.\-_].*jar -snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -jersey-core-1\.9[\.\-_].*jar -jersey-guice-1\.9[\.\-_].*jar -commons-compress-1\.4\.1[\.\-_].*jar -junit-4\.11[\.\-_].*jar -xz-1\.0[\.\-_].*jar -asm-3\.2[\.\-_].*jar -aopalliance-1\.0[\.\-_].*jar -javax\.inject-1[\.\-_].*jar -protobuf-java-2\.5\.0[\.\-_].*jar -commons-io-2\.4[\.\-_].*jar -hamcrest-core-1\.3[\.\-_].*jar -guice-servlet-3\.0[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list deleted file mode 100644 index cc06d808..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list +++ /dev/null @@ -1,123 +0,0 @@ -hadoop-mapreduce-client-core[\.\-_].*jar -bin -bin/mapred -sbin -sbin/mr-jobhistory-daemon\.sh -hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar -commons-digester-1\.8[\.\-_].*jar -curator-client-2\.7\.1[\.\-_].*jar -commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar -jsp-api-2\.1[\.\-_].*jar -jets3t-0\.9\.0[\.\-_].*jar -hadoop-sls-2\.7\.[0-9][\.\-_].*jar -jackson-core-2\.2\.3[\.\-_].*jar -hadoop-mapreduce-client-hs-2\.7\.[0-9][\.\-_].*jar -hadoop-mapreduce-client-jobclient-2\.7\.[0-9].*-tests\.jar -hadoop-distcp[\.\-_].*jar -jaxb-api-2\.2\.2[\.\-_].*jar -api-util-1\.0\.0-M20[\.\-_].*jar -jettison-1\.1[\.\-_].*jar -commons-lang3-3\.3\.2[\.\-_].*jar -curator-framework-2\.7\.1[\.\-_].*jar -commons-io-2\.4[\.\-_].*jar -hadoop-mapreduce-client-hs-plugins[\.\-_].*jar -metrics-core-3\.0\.1[\.\-_].*jar -hadoop-mapreduce-client-app[\.\-_].*jar 
-jetty-util-6\.1\.26[\.\-_].*jar -avro-1\.7\.[4-7][\.\-_].*jar -jaxb-impl-2\.2\.3-1[\.\-_].*jar -hadoop-mapreduce-client-hs[\.\-_].*jar -hadoop-mapreduce-client-hs-plugins-2\.7\.[0-9][\.\-_].*jar -hadoop-sls[\.\-_].*jar -hadoop-ant[\.\-_].*jar -netty-3\.6\.2\.Final[\.\-_].*jar -httpcore-4\.[0-9]\.[0-9][\.\-_].*jar -jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar -hadoop-mapreduce-client-jobclient[\.\-_].*jar -hadoop-archives[\.\-_].*jar -jersey-core-1\.9[\.\-_].*jar -jackson-mapper-asl-1\.9\.13[\.\-_].*jar -hadoop-mapreduce-examples-2\.7\.[0-9][\.\-_].*jar -hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar -snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -gson-2\.2\.4[\.\-_].*jar -hadoop-gridmix[\.\-_].*jar -commons-net-3\.1[\.\-_].*jar -asm-3\.2[\.\-_].*jar -commons-compress-1\.4\.1[\.\-_].*jar -mockito-all-1\.8\.5[\.\-_].*jar -hadoop-openstack[\.\-_].*jar -jackson-xc-1\.9\.13[\.\-_].*jar -junit-4\.11[\.\-_].*jar -jersey-json-1\.9[\.\-_].*jar -hadoop-distcp-2\.7\.[0-9][\.\-_].*jar -xmlenc-0\.52[\.\-_].*jar -api-asn1-api-1\.0\.0-M20[\.\-_].*jar -commons-codec-1\.4[\.\-_].*jar -jackson-core-asl-1\.9\.13[\.\-_].*jar -servlet-api-2\.5[\.\-_].*jar -paranamer-2\.3[\.\-_].*jar -hadoop-datajoin-2\.7\.[0-9][\.\-_].*jar -jetty-6\.1\.26[\.\-_].*jar -jersey-server-1\.9[\.\-_].*jar -hadoop-extras-2\.7\.[0-9][\.\-_].*jar -hadoop-mapreduce-client-shuffle[\.\-_].*jar -apacheds-i18n-2\.0\.0-M15[\.\-_].*jar -hadoop-auth-2\.7\.[0-9][\.\-_].*jar -hadoop-streaming-2\.7\.[0-9][\.\-_].*jar -hadoop-gridmix-2\.7\.[0-9][\.\-_].*jar -commons-math3-3\.1\.1[\.\-_].*jar -hadoop-auth[\.\-_].*jar -log4j-1\.2\.17[\.\-_].*jar -hamcrest-core-1\.3[\.\-_].*jar -hadoop-mapreduce-examples[\.\-_].*jar -hadoop-extras[\.\-_].*jar -stax-api-1\.0-2[\.\-_].*jar -hadoop-mapreduce-client-common[\.\-_].*jar -xz-1\.0[\.\-_].*jar -zookeeper-3\.4\.6[\.\-_].*jar -hadoop-archives-2\.7\.[0-9][\.\-_].*jar -activation-1\.1[\.\-_].*jar -hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar 
-htrace-core-3\.1\.0-incubating[\.\-_].*jar -protobuf-java-2\.5\.0[\.\-_].*jar -hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar -hadoop-datajoin[\.\-_].*jar -apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar -java-xmlbuilder-0\.4[\.\-_].*jar -httpclient-4\.[0-9]\.[0-9][\.\-_].*jar -hadoop-rumen-2\.7\.[0-9][\.\-_].*jar -hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar -guava-11\.0\.2[\.\-_].*jar -jsr305-3\.0\.0[\.\-_].*jar -hadoop-streaming[\.\-_].*jar -hadoop-rumen[\.\-_].*jar -jackson-jaxrs-1\.9\.13[\.\-_].*jar -lib -lib/leveldbjni-all-1\.8[\.\-_].*jar -lib/commons-io-2\.4[\.\-_].*jar -lib/avro-1\.7\.[4-7][\.\-_].*jar -lib/jersey-guice-1\.9[\.\-_].*jar -lib/netty-3\.6\.2\.Final[\.\-_].*jar -lib/jersey-core-1\.9[\.\-_].*jar -lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar -lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar -lib/asm-3\.2[\.\-_].*jar -lib/commons-compress-1\.4\.1[\.\-_].*jar -lib/aopalliance-1\.0[\.\-_].*jar -lib/junit-4\.11[\.\-_].*jar -lib/jackson-core-asl-1\.9\.13[\.\-_].*jar -lib/paranamer-2\.3[\.\-_].*jar -lib/jersey-server-1\.9[\.\-_].*jar -lib/log4j-1\.2\.17[\.\-_].*jar -lib/hamcrest-core-1\.3[\.\-_].*jar -lib/xz-1\.0[\.\-_].*jar -lib/javax\.inject-1[\.\-_].*jar -lib/protobuf-java-2\.5\.0[\.\-_].*jar -lib/guice-3\.0[\.\-_].*jar -lib/guice-servlet-3\.0[\.\-_].*jar -hadoop-openstack-2\.7\.[0-9][\.\-_].*jar -commons-httpclient-3\.1[\.\-_].*jar -commons-collections-3\.2\.[12][\.\-_].*jar -commons-logging-1\.1\.3[\.\-_].*jar -commons-lang-2\.6[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list deleted file mode 100644 index b0a5654d..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list +++ /dev/null @@ -1,4 +0,0 @@ -hadoop-annotations\.jar -hadoop-auth\.jar -hadoop-common\.jar -hadoop-nfs\.jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json 
b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json deleted file mode 100644 index 6ad5f180..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-yarn-api","version":"2.7.3","classes":{"org.apache.hadoop.yarn.api.records.ApplicationAccessType":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","methods":{"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAccessType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","methods":{"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest newInstance(java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String 
getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","methods":{"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getContainerLaunchContext()":{"name":"getContainerLaunchContext","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest newInstance(org.apache.hadoop.yarn.api.records.ContainerLaunchContext, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"setContainerLaunchContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","methods":{"void setBlacklistAdditions(java.util.List)":{"name":"setBlacklistAdditions","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getBlacklistRemovals()":{"name":"getBlacklistRemovals","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List 
getBlacklistAdditions()":{"name":"getBlacklistAdditions","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest newInstance(java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":["java.util.List","java.util.List"],"exceptions":[]},"void setBlacklistRemovals(java.util.List)":{"name":"setBlacklistRemovals","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","methods":{"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; 
values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics":{"name":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":["int"],"exceptions":[]},"int getNumNodeManagers()":{"name":"getNumNodeManagers","returnType":"int","args":[],"exceptions":[]},"void setNumNodeManagers(int)":{"name":"setNumNodeManagers","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","methods":{"java.util.List getIncreaseRequests()":{"name":"getIncreaseRequests","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"void setAskList(java.util.List)":{"name":"setAskList","returnType":"void","args":["java.util.List"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"java.util.List getReleaseList()":{"name":"getReleaseList","returnType":"java.util.List","args":[],"exceptions":[]},"void 
setIncreaseRequests(java.util.List)":{"name":"setIncreaseRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest getResourceBlacklistRequest()":{"name":"getResourceBlacklistRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"void setResourceBlacklistRequest(org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"setResourceBlacklistRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"java.util.List getAskList()":{"name":"getAskList","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"void setReleaseList(java.util.List)":{"name":"setReleaseList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","methods":{"void setQueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"setQueueInfo","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse 
newInstance(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo()":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationReport","methods":{"void setApplicationResourceUsageReport(org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport)":{"name":"setApplicationResourceUsageReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport"],"exceptions":[]},"long getFinishTime()":{"name":"getFinishTime","returnType":"long","args":[],"exceptions":[]},"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport getApplicationResourceUsageReport()":{"name":"getApplicationResourceUsageReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":[],"exceptions":[]},"java.util.Set 
getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.ApplicationAttemptId, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, org.apache.hadoop.yarn.api.records.Token, org.apache.hadoop.yarn.api.records.YarnApplicationState, java.lang.String, java.lang.String, long, long, org.apache.hadoop.yarn.api.records.FinalApplicationStatus, org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport, java.lang.String, float, java.lang.String, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.ApplicationAttemptId","java.lang.String","java.lang.String","java.lang.String","java.lang.String","int","org.apache.hadoop.yarn.api.records.Token","org.apache.hadoop.yarn.api.records.YarnApplicationState","java.lang.String","java.lang.String","long","long","org.apache.hadoop.yarn.api.records.FinalApplicationStatus","org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","java.lang.String","float","java.lang.String","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getClientToAMToken()":{"name":"getClientToAMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void 
setYarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState)":{"name":"setYarnApplicationState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnApplicationState"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"void setStartTime(long)":{"name":"setStartTime","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOriginalTrackingUrl(java.lang.String)":{"name":"setOriginalTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState getYarnApplicationState()":{"name":"getYarnApplicationState","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void 
setClientToAMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setClientToAMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getOriginalTrackingUrl()":{"name":"getOriginalTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFinishTime(long)":{"name":"setFinishTime","returnType":"void","args":["long"],"exceptions":[]},"void setCurrentApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"setCurrentApplicationAttemptId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId 
getCurrentApplicationAttemptId()":{"name":"getCurrentApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Resource":{"name":"org.apache.hadoop.yarn.api.records.Resource","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource newInstance(int, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":["int","int"],"exceptions":[]},"void setVirtualCores(int)":{"name":"setVirtualCores","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMemory(int)":{"name":"setMemory","returnType":"void","args":["int"],"exceptions":[]},"int getMemory()":{"name":"getMemory","returnType":"int","args":[],"exceptions":[]},"int getVirtualCores()":{"name":"getVirtualCores","returnType":"int","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeReport":{"name":"org.apache.hadoop.yarn.api.records.NodeReport","methods":{"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsed()":{"name":"getUsed","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"long getLastHealthReportTime()":{"name":"getLastHealthReportTime","returnType":"long","args":[],"exceptions":[]},"void 
setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void setNodeLabels(java.util.Set)":{"name":"setNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setHealthReport(java.lang.String)":{"name":"setHealthReport","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setRackName(java.lang.String)":{"name":"setRackName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setLastHealthReportTime(long)":{"name":"setLastHealthReportTime","returnType":"void","args":["long"],"exceptions":[]},"void setHttpAddress(java.lang.String)":{"name":"setHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRackName()":{"name":"getRackName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setUsed(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsed","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String getHealthReport()":{"name":"getHealthReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNodeState(org.apache.hadoop.yarn.api.records.NodeState)":{"name":"setNodeState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeState"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, 
long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeState getNodeState()":{"name":"getNodeState","returnType":"org.apache.hadoop.yarn.api.records.NodeState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long, java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long","java.util.Set"],"exceptions":[]},"java.lang.String getHttpAddress()":{"name":"getHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"java.util.Set getNodeLabels()":{"name":"getNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Priority":{"name":"org.apache.hadoop.yarn.api.records.Priority","methods":{"void setPriority(int)":{"name":"setPriority","returnType":"void","args":["int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int 
compareTo(org.apache.hadoop.yarn.api.records.Priority)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":["int"],"exceptions":[]},"int getPriority()":{"name":"getPriority","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"int getAttemptId()":{"name":"getAttemptId","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":["org.apache.hadoop.yarn.api.records.ApplicationId","int"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean 
equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NMToken":{"name":"org.apache.hadoop.yarn.api.records.NMToken","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getToken()":{"name":"getToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NMToken newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NMToken","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","methods":{"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus 
getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest newInstance(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest 
newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","methods":{"java.util.List getStartContainerRequests()":{"name":"getStartContainerRequests","returnType":"java.util.List","args":[],"exceptions":[]},"void setStartContainerRequests(java.util.List)":{"name":"setStartContainerRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext":{"name":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","methods":{"java.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.Map getServiceData()":{"name":"getServiceData","returnType":"java.util.Map","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.Map getLocalResources()":{"name":"getLocalResources","returnType":"java.util.Map","args":[],"exceptions":[]},"void setServiceData(java.util.Map)":{"name":"setServiceData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext newInstance(java.util.Map, java.util.Map, java.util.List, java.util.Map, java.nio.ByteBuffer, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":["java.util.Map","java.util.Map","java.util.List","java.util.Map","java.nio.ByteBuffer","java.util.Map"],"exceptions":[]},"java.util.Map 
getEnvironment()":{"name":"getEnvironment","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getCommands()":{"name":"getCommands","returnType":"java.util.List","args":[],"exceptions":[]},"java.nio.ByteBuffer getTokens()":{"name":"getTokens","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void setLocalResources(java.util.Map)":{"name":"setLocalResources","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setCommands(java.util.List)":{"name":"setCommands","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setTokens(java.nio.ByteBuffer)":{"name":"setTokens","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setEnvironment(java.util.Map)":{"name":"setEnvironment","returnType":"void","args":["java.util.Map"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","methods":{"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void 
setSuccessfullyStoppedContainers(java.util.List)":{"name":"setSuccessfullyStoppedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getSuccessfullyStoppedContainers()":{"name":"getSuccessfullyStoppedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueState":{"name":"org.apache.hadoop.yarn.api.records.QueueState","methods":{"org.apache.hadoop.yarn.api.records.QueueState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationId","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId newInstance(long, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["long","int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"long getClusterTimestamp()":{"name":"getClusterTimestamp","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int 
compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getClusterMetrics()":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse newInstance(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]},"void setClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"setClusterMetrics","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["java.util.List"],"exceptions":[]},"java.util.List getUserAclsInfoList()":{"name":"getUserAclsInfoList","returnType":"java.util.List","args":[],"exceptions":[]},"void 
setUserAclsInfoList(java.util.List)":{"name":"setUserAclsInfoList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus":{"name":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","methods":{"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","methods":{"java.util.List getApplicationList()":{"name":"getApplicationList","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","args":["java.util.List"],"exceptions":[]},"void setApplicationList(java.util.List)":{"name":"setApplicationList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationState","methods":{"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState 
valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.URL":{"name":"org.apache.hadoop.yarn.api.records.URL","methods":{"java.lang.String getFile()":{"name":"getFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPort(int)":{"name":"setPort","returnType":"void","args":["int"],"exceptions":[]},"void setUserInfo(java.lang.String)":{"name":"setUserInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setScheme(java.lang.String)":{"name":"setScheme","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFile(java.lang.String)":{"name":"setFile","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUserInfo()":{"name":"getUserInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL newInstance(java.lang.String, java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.URL","args":["java.lang.String","java.lang.String","int","java.lang.String"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","methods":{"void setIsUnregistered(boolean)":{"name":"setIsUnregistered","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse 
newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["boolean"],"exceptions":[]},"boolean getIsUnregistered()":{"name":"getIsUnregistered","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationMasterProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationMasterProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse finishApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"finishApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","methods":{"void setApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"setApplicationReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport()":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","methods":{"void 
setStartRange(long, long) throws java.lang.IllegalArgumentException":{"name":"setStartRange","returnType":"void","args":["long","long"],"exceptions":["java.lang.IllegalArgumentException"]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope getScope()":{"name":"getScope","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","args":[],"exceptions":[]},"void setScope(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"setScope","returnType":"void","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope, java.util.Set, java.util.Set, java.util.Set, java.util.Set, java.util.EnumSet, org.apache.commons.lang.math.LongRange, org.apache.commons.lang.math.LongRange, java.lang.Long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","java.util.Set","java.util.Set","java.util.Set","java.util.Set","java.util.EnumSet","org.apache.commons.lang.math.LongRange","org.apache.commons.lang.math.LongRange","java.lang.Long"],"exceptions":[]},"long getLimit()":{"name":"getLimit","returnType":"long","args":[],"exceptions":[]},"java.util.EnumSet 
getApplicationStates()":{"name":"getApplicationStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setFinishRange(org.apache.commons.lang.math.LongRange)":{"name":"setFinishRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"void setUsers(java.util.Set)":{"name":"setUsers","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getFinishRange()":{"name":"getFinishRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setApplicationStates(java.util.EnumSet)":{"name":"setApplicationStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.EnumSet"],"exceptions":[]},"java.util.Set getQueues()":{"name":"getQueues","returnType":"java.util.Set","args":[],"exceptions":[]},"java.util.Set getUsers()":{"name":"getUsers","returnType":"java.util.Set","args":[],"exceptions":[]},"void setLimit(long)":{"name":"setLimit","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set, java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set","java.util.EnumSet"],"exceptions":[]},"void setApplicationStates(java.util.Set)":{"name":"setApplicationStates","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getStartRange()":{"name":"getStartRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void 
setApplicationTypes(java.util.Set)":{"name":"setApplicationTypes","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setQueues(java.util.Set)":{"name":"setQueues","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setFinishRange(long, long)":{"name":"setFinishRange","returnType":"void","args":["long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set"],"exceptions":[]},"void setStartRange(org.apache.commons.lang.math.LongRange)":{"name":"setStartRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":[],"exceptions":[]},"java.util.Set getApplicationTypes()":{"name":"getApplicationTypes","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceType":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceType","methods":{"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","methods":{"long getVcoreSeconds()":{"name":"getVcoreSeconds","returnType":"long","args":[],"exceptions":[]},"int 
getNumUsedContainers()":{"name":"getNumUsedContainers","returnType":"int","args":[],"exceptions":[]},"long getMemorySeconds()":{"name":"getMemorySeconds","returnType":"long","args":[],"exceptions":[]},"void setMemorySeconds(long)":{"name":"setMemorySeconds","returnType":"void","args":["long"],"exceptions":[]},"void setUsedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setNeededResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setNeededResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getReservedResources()":{"name":"getReservedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getNeededResources()":{"name":"getNeededResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setNumUsedContainers(int)":{"name":"setNumUsedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setNumReservedContainers(int)":{"name":"setNumReservedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setVcoreSeconds(long)":{"name":"setVcoreSeconds","returnType":"void","args":["long"],"exceptions":[]},"int getNumReservedContainers()":{"name":"getNumReservedContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport newInstance(int, int, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, long, 
long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":["int","int","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsedResources()":{"name":"getUsedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setReservedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setReservedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest newInstance(java.lang.String, boolean, boolean, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","args":["java.lang.String","boolean","boolean","boolean"],"exceptions":[]},"boolean getRecursive()":{"name":"getRecursive","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setIncludeChildQueues(boolean)":{"name":"setIncludeChildQueues","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIncludeApplications()":{"name":"getIncludeApplications","returnType":"boolean","args":[],"exceptions":[]},"boolean getIncludeChildQueues()":{"name":"getIncludeChildQueues","returnType":"boolean","args":[],"exceptions":[]},"void setRecursive(boolean)":{"name":"setRecursive","returnType":"void","args":["boolean"],"exceptions":[]},"void 
setIncludeApplications(boolean)":{"name":"setIncludeApplications","returnType":"void","args":["boolean"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","methods":{"void setIncreasedContainers(java.util.List)":{"name":"setIncreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setDecreasedContainers(java.util.List)":{"name":"setDecreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","java.util.List","java.util.List"],"exceptions":[]},"void setUpdatedNodes(java.util.List)":{"name":"setUpdatedNodes","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"java.util.List getNMTokens()":{"name":"getNMTokens","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getUpdatedNodes()":{"name":"getUpdatedNodes","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getIncreasedContainers()":{"name":"getIncreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.AMCommand 
getAMCommand()":{"name":"getAMCommand","returnType":"org.apache.hadoop.yarn.api.records.AMCommand","args":[],"exceptions":[]},"void setNMTokens(java.util.List)":{"name":"setNMTokens","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setAMCommand(org.apache.hadoop.yarn.api.records.AMCommand)":{"name":"setAMCommand","returnType":"void","args":["org.apache.hadoop.yarn.api.records.AMCommand"],"exceptions":[]},"void setAllocatedContainers(java.util.List)":{"name":"setAllocatedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"int getNumClusterNodes()":{"name":"getNumClusterNodes","returnType":"int","args":[],"exceptions":[]},"void setNumClusterNodes(int)":{"name":"setNumClusterNodes","returnType":"void","args":["int"],"exceptions":[]},"void setCompletedContainersStatuses(java.util.List)":{"name":"setCompletedContainersStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.PreemptionMessage getPreemptionMessage()":{"name":"getPreemptionMessage","returnType":"org.apache.hadoop.yarn.api.records.PreemptionMessage","args":[],"exceptions":[]},"java.util.List getCompletedContainersStatuses()":{"name":"getCompletedContainersStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void 
setAvailableResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setAvailableResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, org.apache.hadoop.yarn.api.records.Token, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","org.apache.hadoop.yarn.api.records.Token","java.util.List","java.util.List"],"exceptions":[]},"void setPreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage)":{"name":"setPreemptionMessage","returnType":"void","args":["org.apache.hadoop.yarn.api.records.PreemptionMessage"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List"],"exceptions":[]},"java.util.List getAllocatedContainers()":{"name":"getAllocatedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"int 
getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"java.util.List getDecreasedContainers()":{"name":"getDecreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","methods":{"org.apache.hadoop.yarn.api.records.LocalResourceVisibility valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setUserAcls(java.util.List)":{"name":"setUserAcls","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getUserAcls()":{"name":"getUserAcls","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo newInstance(java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","args":["java.lang.String","java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","methods":{"void setRenewer(java.lang.String)":{"name":"setRenewer","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest 
newInstance(java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRenewer()":{"name":"getRenewer","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerState":{"name":"org.apache.hadoop.yarn.api.records.ContainerState","methods":{"org.apache.hadoop.yarn.api.records.ContainerState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.ContainerState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ContainerState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ContainerManagementProtocol":{"name":"org.apache.hadoop.yarn.api.ContainerManagementProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse getContainerStatuses(org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatuses","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse startContainers(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) 
throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse stopContainers(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"stopContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.exceptions.YarnException":{"name":"org.apache.hadoop.yarn.exceptions.YarnException","methods":{}},"org.apache.hadoop.yarn.api.records.QueueInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setCurrentCapacity(float)":{"name":"setCurrentCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setCapacity(float)":{"name":"setCapacity","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getChildQueues()":{"name":"getChildQueues","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo newInstance(java.lang.String, float, float, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.QueueState, java.util.Set, 
java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String","float","float","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.QueueState","java.util.Set","java.lang.String"],"exceptions":[]},"void setDefaultNodeLabelExpression(java.lang.String)":{"name":"setDefaultNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.util.List getApplications()":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":[]},"float getCapacity()":{"name":"getCapacity","returnType":"float","args":[],"exceptions":[]},"float getCurrentCapacity()":{"name":"getCurrentCapacity","returnType":"float","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueState getQueueState()":{"name":"getQueueState","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":[],"exceptions":[]},"void setChildQueues(java.util.List)":{"name":"setChildQueues","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setApplications(java.util.List)":{"name":"setApplications","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getDefaultNodeLabelExpression()":{"name":"getDefaultNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumCapacity(float)":{"name":"setMaximumCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setQueueState(org.apache.hadoop.yarn.api.records.QueueState)":{"name":"setQueueState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueState"],"exceptions":[]},"void setAccessibleNodeLabels(java.util.Set)":{"name":"setAccessibleNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"float getMaximumCapacity()":{"name":"getMaximumCapacity","returnType":"float","args":[],"exceptions":[]},"java.util.Set 
getAccessibleNodeLabels()":{"name":"getAccessibleNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","methods":{"void setRMDelegationToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setRMDelegationToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse newInstance(org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken()":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeId":{"name":"org.apache.hadoop.yarn.api.records.NodeId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId newInstance(java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":["java.lang.String","int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int 
getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"void setApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"setApplicationSubmissionContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["boolean"],"exceptions":[]},"void setIsKillCompleted(boolean)":{"name":"setIsKillCompleted","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIsKillCompleted()":{"name":"getIsKillCompleted","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationClientProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationClientProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplication(org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest) throws 
org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNewApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse getClusterNodes(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse getLabelsToNodes(org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse 
moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse getQueueUserAcls(org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueUserAcls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse forceKillApplication(org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"forceKillApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse getNodeToLabels(org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse getQueueInfo(org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest) throws 
org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse getClusterMetrics(org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse getClusterNodeLabels(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","args":["java.util.List"],"exceptions":[]},"java.util.List 
getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerId":{"name":"org.apache.hadoop.yarn.api.records.ContainerId","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newInstance(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, 
int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","int"],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"long getContainerId()":{"name":"getContainerId","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newContainerId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, long)":{"name":"newContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getApplicationAttemptId()":{"name":"getApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId fromString(java.lang.String)":{"name":"fromString","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["java.lang.String"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Container":{"name":"org.apache.hadoop.yarn.api.records.Container","methods":{"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"java.lang.String getNodeHttpAddress()":{"name":"getNodeHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId 
getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getId()":{"name":"getId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Container newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Container","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void 
setId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNodeHttpAddress(java.lang.String)":{"name":"setNodeHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean","java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int"],"exceptions":[]},"void setRelaxLocality(boolean)":{"name":"setRelaxLocality","returnType":"void","args":["boolean"],"exceptions":[]},"void 
setResourceName(java.lang.String)":{"name":"setResourceName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean"],"exceptions":[]},"void setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getRelaxLocality()":{"name":"getRelaxLocality","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"java.lang.String getResourceName()":{"name":"getResourceName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"boolean 
isAnyLocation(java.lang.String)":{"name":"isAnyLocation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerStatus":{"name":"org.apache.hadoop.yarn.api.records.ContainerStatus","methods":{"org.apache.hadoop.yarn.api.records.ContainerState getState()":{"name":"getState","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getContainerId()":{"name":"getContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"int getExitStatus()":{"name":"getExitStatus","returnType":"int","args":[],"exceptions":[]},"void setExitStatus(int)":{"name":"setExitStatus","returnType":"void","args":["int"],"exceptions":[]},"void setState(org.apache.hadoop.yarn.api.records.ContainerState)":{"name":"setState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerState"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerStatus newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.ContainerState, java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.ContainerState","java.lang.String","int"],"exceptions":[]},"void setContainerId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setContainerId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"java.lang.String 
getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext":{"name":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","methods":{"void setMaxAppAttempts(int)":{"name":"setMaxAppAttempts","returnType":"void","args":["int"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ReservationId getReservationID()":{"name":"getReservationID","returnType":"org.apache.hadoop.yarn.api.records.ReservationId","args":[],"exceptions":[]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"boolean getUnmanagedAM()":{"name":"getUnmanagedAM","returnType":"boolean","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, java.lang.String, boolean, java.lang.String, org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","java.lang.String","boolean","java.lang.String","org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"void 
setUnmanagedAM(boolean)":{"name":"setUnmanagedAM","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, org.apache.hadoop.yarn.api.records.LogAggregationContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","org.apache.hadoop.yarn.api.records.LogAggregationContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void 
setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getAttemptFailuresValidityInterval()":{"name":"getAttemptFailuresValidityInterval","returnType":"long","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setAMContainerResourceRequest(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"setAMContainerResourceRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LogAggregationContext getLogAggregationContext()":{"name":"getLogAggregationContext","returnType":"org.apache.hadoop.yarn.api.records.LogAggregationContext","args":[],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void 
setLogAggregationContext(org.apache.hadoop.yarn.api.records.LogAggregationContext)":{"name":"setLogAggregationContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LogAggregationContext"],"exceptions":[]},"void setReservationID(org.apache.hadoop.yarn.api.records.ReservationId)":{"name":"setReservationID","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ReservationId"],"exceptions":[]},"void setApplicationName(java.lang.String)":{"name":"setApplicationName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean getCancelTokensWhenComplete()":{"name":"getCancelTokensWhenComplete","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest getAMContainerResourceRequest()":{"name":"getAMContainerResourceRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String 
getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"int getMaxAppAttempts()":{"name":"getMaxAppAttempts","returnType":"int","args":[],"exceptions":[]},"void setAttemptFailuresValidityInterval(long)":{"name":"setAttemptFailuresValidityInterval","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCancelTokensWhenComplete(boolean)":{"name":"setCancelTokensWhenComplete","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String, boolean, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String","boolean","java.lang.String","java.lang.String"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"boolean getKeepContainersAcrossApplicationAttempts()":{"name":"getKeepContainersAcrossApplicationAttempts","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, 
java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Priority, org.apache.hadoop.yarn.api.records.ContainerLaunchContext, boolean, boolean, int, org.apache.hadoop.yarn.api.records.Resource, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.ContainerLaunchContext","boolean","boolean","int","org.apache.hadoop.yarn.api.records.Resource","java.lang.String"],"exceptions":[]},"void setAMContainerSpec(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"setAMContainerSpec","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getAMContainerSpec()":{"name":"getAMContainerSpec","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationName()":{"name":"getApplicationName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setKeepContainersAcrossApplicationAttempts(boolean)":{"name":"setKeepContainersAcrossApplicationAttempts","returnType":"void","args":["boolean"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse 
newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["java.util.List"],"exceptions":[]},"void setNodeReports(java.util.List)":{"name":"setNodeReports","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getNodeReports()":{"name":"getNodeReports","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Token":{"name":"org.apache.hadoop.yarn.api.records.Token","methods":{"org.apache.hadoop.yarn.api.records.Token newInstance([B, java.lang.String, [B, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["[B","java.lang.String","[B","java.lang.String"],"exceptions":[]},"void setIdentifier(java.nio.ByteBuffer)":{"name":"setIdentifier","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setPassword(java.nio.ByteBuffer)":{"name":"setPassword","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.lang.String getKind()":{"name":"getKind","returnType":"java.lang.String","args":[],"exceptions":[]},"void setKind(java.lang.String)":{"name":"setKind","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setService(java.lang.String)":{"name":"setService","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.nio.ByteBuffer getPassword()":{"name":"getPassword","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"java.nio.ByteBuffer getIdentifier()":{"name":"getIdentifier","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"java.lang.String getService()":{"name":"getService","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","methods":{"void 
setAllServicesMetaData(java.util.Map)":{"name":"setAllServicesMetaData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse newInstance(java.util.Map, java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["java.util.Map","java.util.List","java.util.Map"],"exceptions":[]},"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setSuccessfullyStartedContainers(java.util.List)":{"name":"setSuccessfullyStartedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.Map getAllServicesMetaData()":{"name":"getAllServicesMetaData","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getSuccessfullyStartedContainers()":{"name":"getSuccessfullyStartedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","methods":{"void setNMTokensFromPreviousAttempts(java.util.List)":{"name":"setNMTokensFromPreviousAttempts","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"void setSchedulerResourceTypes(java.util.EnumSet)":{"name":"setSchedulerResourceTypes","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"java.util.List getContainersFromPreviousAttempts()":{"name":"getContainersFromPreviousAttempts","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List 
getNMTokensFromPreviousAttempts()":{"name":"getNMTokensFromPreviousAttempts","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.EnumSet getSchedulerResourceTypes()":{"name":"getSchedulerResourceTypes","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse newInstance(org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, java.util.Map, java.nio.ByteBuffer, java.util.List, java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","java.util.Map","java.nio.ByteBuffer","java.util.List","java.lang.String","java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setClientToAMTokenMasterKey(java.nio.ByteBuffer)":{"name":"setClientToAMTokenMasterKey","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.nio.ByteBuffer getClientToAMTokenMasterKey()":{"name":"getClientToAMTokenMasterKey","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void 
setContainersFromPreviousAttempts(java.util.List)":{"name":"setContainersFromPreviousAttempts","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest$ResourceRequestComparator":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest$ResourceRequestComparator","methods":{"int compare(java.lang.Object, java.lang.Object)":{"name":"compare","returnType":"int","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"int compare(org.apache.hadoop.yarn.api.records.ResourceRequest, org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compare","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest","org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueACL":{"name":"org.apache.hadoop.yarn.api.records.QueueACL","methods":{"org.apache.hadoop.yarn.api.records.QueueACL valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueACL","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueACL; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueACL;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest","args":[],"exceptions":[]},"void 
setNodeStates(java.util.EnumSet)":{"name":"setNodeStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"java.util.EnumSet getNodeStates()":{"name":"getNodeStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","methods":{"void setContainerStatuses(java.util.List)":{"name":"setContainerStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.List getContainerStatuses()":{"name":"getContainerStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResource":{"name":"org.apache.hadoop.yarn.api.records.LocalResource","methods":{"void setPattern(java.lang.String)":{"name":"setPattern","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility getVisibility()":{"name":"getVisibility","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, java.lang.String, 
boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","java.lang.String","boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","java.lang.String"],"exceptions":[]},"long getSize()":{"name":"getSize","returnType":"long","args":[],"exceptions":[]},"void setTimestamp(long)":{"name":"setTimestamp","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType getType()":{"name":"getType","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long","boolean"],"exceptions":[]},"void setVisibility(org.apache.hadoop.yarn.api.records.LocalResourceVisibility)":{"name":"setVisibility","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LocalResourceVisibility"],"exceptions":[]},"boolean 
getShouldBeUploadedToSharedCache()":{"name":"getShouldBeUploadedToSharedCache","returnType":"boolean","args":[],"exceptions":[]},"void setType(org.apache.hadoop.yarn.api.records.LocalResourceType)":{"name":"setType","returnType":"void","args":["org.apache.hadoop.yarn.api.records.LocalResourceType"],"exceptions":[]},"void setShouldBeUploadedToSharedCache(boolean)":{"name":"setShouldBeUploadedToSharedCache","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getPattern()":{"name":"getPattern","returnType":"java.lang.String","args":[],"exceptions":[]},"long getTimestamp()":{"name":"getTimestamp","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.URL","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResource newInstance(org.apache.hadoop.yarn.api.records.URL, org.apache.hadoop.yarn.api.records.LocalResourceType, org.apache.hadoop.yarn.api.records.LocalResourceVisibility, long, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.LocalResource","args":["org.apache.hadoop.yarn.api.records.URL","org.apache.hadoop.yarn.api.records.LocalResourceType","org.apache.hadoop.yarn.api.records.LocalResourceVisibility","long","long"],"exceptions":[]},"void setSize(long)":{"name":"setSize","returnType":"void","args":["long"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.URL)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.URL"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest","args":[],"exceptions":[]}}}}} \ No 
newline at end of file diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list deleted file mode 100644 index d3861b97..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list +++ /dev/null @@ -1,3 +0,0 @@ -mapred -yarn -container-executor diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json deleted file mode 100644 index f62ee8ed..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse 
updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics 
getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"void setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler 
getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws 
java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, 
org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, 
java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}} \ No newline at end of file diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json deleted file mode 100644 index b394bff9..00000000 --- 
a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json +++ /dev/null @@ -1 +0,0 @@ -{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, 
org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}} \ No newline at end of file diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list deleted file mode 100644 index 26613d4e..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list +++ /dev/null @@ -1,38 +0,0 @@ -netty-3\.6\.2\.Final[\.\-_].*jar -leveldbjni-all-1\.8[\.\-_].*jar -jackson-core-asl-1\.9\.13[\.\-_].*jar 
-jackson-xc-1\.9\.13[\.\-_].*jar -jersey-server-1\.9[\.\-_].*jar -stax-api-1\.0-2[\.\-_].*jar -zookeeper-3\.4\.6[\.\-_].*jar -guice-3\.0[\.\-_].*jar -jaxb-impl-2\.2\.3-1[\.\-_].*jar -zookeeper-3\.4\.6.*-tests\.jar -jersey-client-1\.9[\.\-_].*jar -commons-cli-1\.2[\.\-_].*jar -log4j-1\.2\.17[\.\-_].*jar -jackson-mapper-asl-1\.9\.13[\.\-_].*jar -guava-11\.0\.2[\.\-_].*jar -jetty-6\.1\.26[\.\-_].*jar -commons-logging-1\.1\.3[\.\-_].*jar -jersey-core-1\.9[\.\-_].*jar -jersey-guice-1\.9[\.\-_].*jar -commons-compress-1\.4\.1[\.\-_].*jar -jettison-1\.1[\.\-_].*jar -commons-collections-3\.2\.[12][\.\-_].*jar -xz-1\.0[\.\-_].*jar -asm-3\.2[\.\-_].*jar -commons-codec-1\.4[\.\-_].*jar -aopalliance-1\.0[\.\-_].*jar -javax\.inject-1[\.\-_].*jar -commons-lang-2\.6[\.\-_].*jar -jetty-util-6\.1\.26[\.\-_].*jar -jsr305-3\.0\.0[\.\-_].*jar -protobuf-java-2\.5\.0[\.\-_].*jar -commons-io-2\.4[\.\-_].*jar -activation-1\.1[\.\-_].*jar -jersey-json-1\.9[\.\-_].*jar -jaxb-api-2\.2\.2[\.\-_].*jar -guice-servlet-3\.0[\.\-_].*jar -servlet-api-2\.5[\.\-_].*jar -jackson-jaxrs-1\.9\.13[\.\-_].*jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list deleted file mode 100644 index bb880052..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list +++ /dev/null @@ -1,74 +0,0 @@ -hadoop-yarn-server-sharedcachemanager.*\.jar -bin -bin/mapred -bin/container-executor -bin/yarn -sbin -sbin/yarn-daemon\.sh -sbin/yarn-daemons\.sh -hadoop-yarn-registry-2\.7\.[0-9].*\.jar -hadoop-yarn-applications-unmanaged-am-launcher-2\.7\.[0-9].*\.jar -hadoop-yarn-common-2\.7\.[0-9].*\.jar -hadoop-yarn-server-nodemanager.*\.jar -hadoop-yarn-server-applicationhistoryservice-2\.7\.[0-9].*\.jar -hadoop-yarn-server-common.*\.jar -etc -etc/hadoop -hadoop-yarn-server-common-2\.7\.[0-9].*\.jar -hadoop-yarn-server-tests.*\.jar -hadoop-yarn-server-resourcemanager.*\.jar -hadoop-yarn-server-web-proxy.*\.jar 
-hadoop-yarn-api-2\.7\.[0-9].*\.jar -hadoop-yarn-common.*\.jar -hadoop-yarn-server-web-proxy-2\.7\.[0-9].*\.jar -hadoop-yarn-applications-distributedshell-2\.7\.[0-9].*\.jar -hadoop-yarn-server-tests-2\.7\.[0-9].*\.jar -hadoop-yarn-server-resourcemanager-2\.7\.[0-9].*\.jar -hadoop-yarn-registry.*\.jar -hadoop-yarn-server-sharedcachemanager-2\.7\.[0-9].*\.jar -hadoop-yarn-client-2\.7\.[0-9].*\.jar -hadoop-yarn-applications-distributedshell.*\.jar -hadoop-yarn-server-nodemanager-2\.7\.[0-9].*\.jar -hadoop-yarn-api.*\.jar -hadoop-yarn-client.*\.jar -lib -lib/commons-cli-1\.2.*\.jar -lib/leveldbjni-all-1\.8.*\.jar -lib/jaxb-api-2\.2\.2.*\.jar -lib/jettison-1\.1.*\.jar -lib/commons-io-2\.4.*\.jar -lib/jetty-util-6\.1\.26.*\.jar -lib/jaxb-impl-2\.2\.3-1.*\.jar -lib/jersey-guice-1\.9.*\.jar -lib/netty-3\.6\.2\.Final.*\.jar -lib/jersey-core-1\.9.*\.jar -lib/jackson-mapper-asl-1\.9\.13.*\.jar -lib/asm-3\.2.*\.jar -lib/commons-compress-1\.4\.1.*\.jar -lib/aopalliance-1\.0.*\.jar -lib/jackson-xc-1\.9\.13.*\.jar -lib/jersey-json-1\.9.*\.jar -lib/commons-codec-1\.4.*\.jar -lib/jackson-core-asl-1\.9\.13.*\.jar -lib/servlet-api-2\.5.*\.jar -lib/jetty-6\.1\.26.*\.jar -lib/jersey-server-1\.9.*\.jar -lib/log4j-1\.2\.17.*\.jar -lib/zookeeper-3\.4\.6.*-tests\.jar -lib/stax-api-1\.0-2.*\.jar -lib/jersey-client-1\.9.*\.jar -lib/xz-1\.0.*\.jar -lib/zookeeper-3\.4\.6.*\.jar -lib/activation-1\.1.*\.jar -lib/javax\.inject-1.*\.jar -lib/protobuf-java-2\.5\.0.*\.jar -lib/guice-3\.0.*\.jar -lib/guava-11\.0\.2.*\.jar -lib/jsr305-3\.0\.0.*\.jar -lib/jackson-jaxrs-1\.9\.13.*\.jar -lib/commons-collections-3\.2\.[1-2].*\.jar -lib/commons-logging-1\.1\.3.*\.jar -lib/commons-lang-2\.6.*\.jar -lib/guice-servlet-3\.0.*\.jar -hadoop-yarn-server-applicationhistoryservice.*\.jar -hadoop-yarn-applications-unmanaged-am-launcher.*\.jar diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy 
deleted file mode 100644 index 339de4cb..00000000 --- a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy +++ /dev/null @@ -1,430 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -specs { - tests { - 'HADOOP_EJH1' { - name = 'HADOOP_EJH1' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'JAVA_HOME' - } - } - 'HADOOP_EC1' { - name = 'HADOOP_EC1' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'HADOOP_TOOLS_PATH' - donotcheckexistance = true - } - } - 'HADOOP_EC2' { - name = 'HADOOP_EC2' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'HADOOP_COMMON_HOME' - } - } - 'HADOOP_EC3' { - name = 'HADOOP_EC3' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'HADOOP_COMMON_DIR' - relative = true - } - } - 'HADOOP_EC4' { - name = 'HADOOP_EC4' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'HADOOP_COMMON_LIB_JARS_DIR' - relative = true - } - } - 'HADOOP_EC5' { - name = 'HADOOP_EC5' - type = 'envdir' - arguments { - envcmd = 'hadoop envvars' - variable = 'HADOOP_CONF_DIR' - } - } - 'HADOOP_EH1' { - name = 'HADOOP_EH1' - type = 'envdir' - arguments { - envcmd = 'hdfs envvars' - variable = 'HADOOP_HDFS_HOME' - } - } - 'HADOOP_EH2' { - name = 'HADOOP_EH2' - type = 'envdir' - arguments { - envcmd = 'hdfs envvars' - variable = 'HDFS_DIR' - relative = true - } - } - 'HADOOP_EH3' { - name = 'HADOOP_EH3' - type = 'envdir' - arguments { - envcmd = 'hdfs envvars' - variable = 'HDFS_LIB_JARS_DIR' - relative = true - } - } - 'HADOOP_EY1' { - name = 'HADOOP_EY1' - type = 'envdir' - arguments { - envcmd = 'yarn envvars' - variable = 'HADOOP_YARN_HOME' - } - } - 'HADOOP_EY2' { - name = 'HADOOP_EY2' - type = 'envdir' - arguments { - envcmd = 'yarn envvars' - variable = 'YARN_DIR' - relative = true - } - } - 'HADOOP_EY3' { 
- name = 'HADOOP_EY3' - type = 'envdir' - arguments { - envcmd = 'yarn envvars' - variable = 'YARN_LIB_JARS_DIR' - relative = true - } - } - 'HADOOP_EM1' { - name = 'HADOOP_EM1' - type = 'envdir' - arguments { - envcmd = 'mapred envvars' - variable = 'HADOOP_MAPRED_HOME' - } - } - 'HADOOP_EM2' { - name = 'HADOOP_EM2' - type = 'envdir' - arguments { - envcmd = 'mapred envvars' - variable = 'MAPRED_DIR' - relative = true - } - } - 'HADOOP_EM3' { - name = 'HADOOP_EM3' - type = 'envdir' - arguments { - envcmd = 'mapred envvars' - variable = 'MAPRED_LIB_JARS_DIR' - relative = true - } - } - 'HADOOP_EJH2_HADOOP' { - name = 'HADOOP_EJH2_HADOOP' - type = 'shell' - arguments { - command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/hadoop-env.sh' - message = 'JAVA_HOME is not set' - } - } - 'HADOOP_EJH2_YARN' { - name = 'HADOOP_EJH2_YARN' - type = 'shell' - arguments { - command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/yarn-env.sh' - message = 'JAVA_HOME is not set' - } - } - 'HADOOP_PLATVER_1' { - name = 'HADOOP_PLATVER' - type = 'shell' - arguments { - command = 'hadoop version | head -n 1 | grep -E \'Hadoop\\s+[0-9\\.]+[_\\-][A-Za-z_0-9]+\'' - message = 'Hadoop\'s version string is not correct' - } - } - 'HADOOP_DIRSTRUCT_COMMON' { - name = 'HADOOP_DIRSTRUCT_COMMON' - type = 'dirstruct' - arguments { - envcmd = 'hadoop envvars' - baseDirEnv = 'HADOOP_COMMON_HOME' - referenceList = 'hadoop-common.list' - } - } - 'HADOOP_DIRSTRUCT_HDFS' { - name = 'HADOOP_DIRSTRUCT_HDFS' - type = 'dirstruct' - arguments { - envcmd = 'hdfs envvars' - baseDirEnv = 'HADOOP_HDFS_HOME' - referenceList = 'hadoop-hdfs.list' - } - } - 'HADOOP_DIRSTRUCT_MAPRED' { - name = 'HADOOP_DIRSTRUCT_MAPRED' - type = 'dirstruct' - arguments { - envcmd = 'mapred envvars' - 
baseDirEnv = 'HADOOP_MAPRED_HOME' - referenceList = 'hadoop-mapreduce.list' - } - } - 'HADOOP_DIRSTRUCT_YARN' { - name = 'HADOOP_DIRSTRUCT_YARN' - type = 'dirstruct' - arguments { - envcmd = 'yarn envvars' - baseDirEnv = 'HADOOP_YARN_HOME' - referenceList = 'hadoop-yarn.list' - } - } - 'HADOOP_SUBPROJS' { - name = 'HADOOP_SUBPROJS' - type = 'dirstruct' - arguments { - envcmd = 'hadoop envvars' - baseDirEnv = 'HADOOP_COMMON_HOME' - referenceList = 'hadoop-subprojs.list' - } - } - 'HADOOP_BINCONTENT_COMMON' { - name = 'HADOOP_BINCONTENT_COMMON' - type = 'dirstruct' - arguments { - envcmd = 'hadoop envvars' - baseDirEnv = 'HADOOP_COMMON_HOME' - subDir = 'bin' - referenceList = 'hadoop-common-bin.list' - } - } - 'HADOOP_BINCONTENT_HDFS' { - name = 'HADOOP_BINCONTENT_HDFS' - type = 'dirstruct' - arguments { - envcmd = 'hdfs envvars' - baseDirEnv = 'HADOOP_HDFS_HOME' - subDir = 'bin' - referenceList = 'hadoop-hdfs-bin.list' - } - } - 'HADOOP_BINCONTENT_MAPRED' { - name = 'HADOOP_BINCONTENT_MAPRED' - type = 'dirstruct' - arguments { - envcmd = 'mapred envvars' - baseDirEnv = 'HADOOP_MAPRED_HOME' - subDir = 'bin' - referenceList = 'hadoop-mapreduce-bin.list' - } - } - 'HADOOP_BINCONTENT_YARN' { - name = 'HADOOP_BINCONTENT_YARN' - type = 'dirstruct' - arguments { - envcmd = 'yarn envvars' - baseDirEnv = 'HADOOP_YARN_HOME' - subDir = 'bin' - referenceList = 'hadoop-yarn-bin.list' - } - } - 'HADOOP_LIBJARSCONTENT_COMMON' { - name = 'HADOOP_JARCONTENT_COMMON' - type = 'dirstruct' - arguments { - envcmd = 'hadoop envvars' - baseDirEnv = 'HADOOP_COMMON_HOME' - subDirEnv = 'HADOOP_COMMON_LIB_JARS_DIR' - referenceList = 'hadoop-common-jar.list' - } - } - 'HADOOP_LIBJARSCONTENT_HDFS' { - name = 'HADOOP_JARCONTENT_HDFS' - type = 'dirstruct' - arguments { - envcmd = 'hdfs envvars' - baseDirEnv = 'HADOOP_HDFS_HOME' - subDirEnv = 'HDFS_LIB_JARS_DIR' - referenceList = 'hadoop-hdfs-jar.list' - } - } - 'HADOOP_LIBJARSCONTENT_MAPRED' { - name = 'HADOOP_JARCONTENT_MAPRED' - type = 
'dirstruct' - arguments { - envcmd = 'mapred envvars' - baseDirEnv = 'HADOOP_MAPRED_HOME' - subDirEnv = 'MAPRED_LIB_JARS_DIR' - referenceList = 'hadoop-mapreduce-jar.list' - } - } - 'HADOOP_LIBJARSCONTENT_YARN' { - name = 'HADOOP_JARCONTENT_YARN' - type = 'dirstruct' - arguments { - envcmd = 'yarn envvars' - baseDirEnv = 'HADOOP_YARN_HOME' - subDirEnv = 'YARN_LIB_JARS_DIR' - referenceList = 'hadoop-yarn-jar.list' - } - } - 'HADOOP_GETCONF' { - name = 'HADOOP_GETCONF' - type = 'shell' - arguments { - command = '[ `hdfs getconf -confKey dfs.permissions.superusergroup >/dev/null 2>/dev/null; echo $?` == "0" ]' - message = 'It\' not possible to to determine key Hadoop configuration values by using ${HADOOP_HDFS_HOME}/bin/hdfs getconf' - } - } - 'HADOOP_CNATIVE1' { - name = 'HADOOP_CNATIVE1' - type = 'shell' - arguments { - command = 'hadoop checknative -a 2>/dev/null | grep hadoop | grep true' - message = 'hadoop-common-project must be build with -Pnative or -Pnative-win' - } - } - 'HADOOP_CNATIVE2' { - name = 'HADOOP_CNATIVE2' - type = 'shell' - arguments { - command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true' - message = 'hadoop-common-project must be build with -Prequire.snappy' - } - } - 'HADOOP_HNATIVE1' { - name = 'HADOOP_HNATIVE1' - type = 'shell' - arguments { - command = '[ ! -n ${HADOOP_COMMON_HOME} ] || HADOOP_COMMON_HOME=`hadoop envvars | grep HADOOP_COMMON_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+ - 'test -e $HADOOP_COMMON_HOME/lib/native/libhdfs.a' - message = 'hadoop-hdfs-project must be build with -Pnative or -Pnative-win' - } - } - 'HADOOP_YNATIVE1' { - name = 'HADOOP_YNATIVE1' - type = 'shell' - arguments { - command = '[ ! 
-n ${HADOOP_YARN_HOME} ] || HADOOP_YARN_HOME=`yarn envvars | grep HADOOP_YARN_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+ - 'echo $HADOOP_YARN_HOME; test -e $HADOOP_YARN_HOME/bin/container-executor' - message = 'hadoop-yarn-project must be build with -Pnative or -Pnative-win' - } - } - 'HADOOP_MNATIVE1' { - name = 'HADOOP_MNATIVE1' - type = 'shell' - arguments { - command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true' - message = 'hadoop-mapreduce-project must be build with -Prequire.snappy' - } - } - 'HADOOP_COMPRESSION' { - name = 'HADOOP_COMPRESSION' - type = 'shell' - arguments { - command = '[[ "$(hadoop checknative -a 2>/dev/null | egrep -e ^zlib -e ^snappy | sort -u | grep true | wc -l)" == 2 ]]' - message = 'hadoop must be built with -Dcompile.native=true' - } - } - 'HADOOP_TOOLS' { - name = 'HADOOP_TOOLS' - type = 'hadoop_tools' - arguments { - } - } - 'HADOOP_API1' { - name = "HADOOP_API1" - type = 'api_examination' - arguments { - baseDirEnv = 'HADOOP_COMMON_HOME' - libDir = 'HADOOP_COMMON_DIR' - envcmd = 'hadoop envvars' - jar = 'hadoop-common' - resourceFile = 'hadoop-common-2.7.3-api-report.json' - } - } - 'HADOOP_API2' { - name = "HADOOP_API2" - type = 'api_examination' - arguments { - baseDirEnv = 'HADOOP_HDFS_HOME' - libDir = 'HDFS_DIR' - envcmd = 'hdfs envvars' - jar = 'hadoop-hdfs' - resourceFile = 'hadoop-hdfs-2.7.3-api-report.json' - } - } - 'HADOOP_API3' { - name = "HADOOP_API3" - type = 'api_examination' - arguments { - baseDirEnv = 'HADOOP_YARN_HOME' - libDir = 'YARN_DIR' - envcmd = 'yarn envvars' - jar = 'hadoop-yarn-common' - resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json' - } - } - 'HADOOP_API4' { - name = "HADOOP_API4" - type = 'api_examination' - arguments { - baseDirEnv = 'HADOOP_YARN_HOME' - libDir = 'YARN_DIR' - envcmd = 'yarn envvars' - jar = 'hadoop-yarn-client' - resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json' - } - } - 'HADOOP_API5' { - name = "HADOOP_API5" - type = 'api_examination' - arguments 
{ - baseDirEnv = 'HADOOP_YARN_HOME' - libDir = 'YARN_DIR' - envcmd = 'yarn envvars' - jar = 'hadoop-yarn-api' - resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json' - } - } - 'HADOOP_API6' { - name = "HADOOP_API6" - type = 'api_examination' - arguments { - baseDirEnv = 'HADOOP_MAPRED_HOME' - libDir = 'MAPRED_DIR' - envcmd = 'mapred envvars' - jar = 'hadoop-mapreduce-client-core' - resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json' - } - } - } -} diff --git a/build.gradle b/build.gradle index 101ecc74..b0a83b15 100644 --- a/build.gradle +++ b/build.gradle @@ -80,6 +80,8 @@ rat { "bigtop-tests/smoke-tests/phoenix/*.sql", "bigtop-tests/smoke-tests/ignite-hadoop/*.data", "bigtop-tests/smoke-tests/tajo/table1/*.csv", + "bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/*.json", + "bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/*.list", "**/target/**", "**/build/**", "**/.gradle/**", @@ -427,7 +429,6 @@ task "bigtop-slaves"(type:Exec, project.afterEvaluate { checkClusterTestProjects("smoke.tests") - checkClusterTestProjects("spec.tests") artifactToInstall(dependsOn: [installTopLevel, installCommon, installConf, installiTest]) } diff --git a/settings.gradle b/settings.gradle index 7ae4bf57..ac071f1b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -31,9 +31,4 @@ fTree.each() { buildFile -> def parent = buildFile.getParentFile().name include("bigtop-tests:smoke-tests:$parent") } -fTree = fileTree(dir: 'bigtop-tests/spec-tests', include: '*/build.gradle') -fTree.each() { buildFile -> - def parent = buildFile.getParentFile().name - include("bigtop-tests:spec-tests:$parent") -} -- cgit v1.2.3