build.dir ${user.dir}/build build.dir.hive ${build.dir}/hive hadoop.tmp.dir ${build.dir.hive}/test/hadoop-${user.name} A base for other temporary directories. hive.exec.scratchdir ${build.dir}/scratchdir Scratch space for Hive jobs hive.exec.local.scratchdir ${build.dir}/localscratchdir/ Local scratch space for Hive jobs javax.jdo.option.ConnectionURL jdbc:derby:;databaseName=../build/test/junit_metastore_db;create=true javax.jdo.option.ConnectionDriverName org.apache.derby.jdbc.EmbeddedDriver javax.jdo.option.ConnectionUserName APP javax.jdo.option.ConnectionPassword mine hive.metastore.warehouse.dir ${test.warehouse.dir} hive.metastore.metadb.dir ${build.dir}/test/data/metadb/ Required by metastore server or if the uris argument below is not supplied test.log.dir ${build.dir}/test/logs test.src.dir ${build.dir}/src/test hive.jar.path ${build.dir.hive}/ql/hive-exec-${version}.jar hive.metastore.rawstore.impl org.apache.hadoop.hive.metastore.ObjectStore Name of the class that implements the org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieve raw metadata objects such as table, database hive.querylog.location ${build.dir}/tmp Location of the structured hive logs hive.task.progress false Track progress of a task hive.support.concurrency false Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks. 
fs.pfile.impl org.apache.hadoop.fs.ProxyLocalFileSystem A proxy for local file system used for cross file system testing hive.exec.mode.local.auto false Let Hive determine whether to run in local mode automatically. Disabling this for tests so that minimr is not affected hive.auto.convert.join false Whether Hive enables the optimization of converting a common join into a mapjoin based on the input file size hive.ignore.mapjoin.hint false Whether Hive ignores the mapjoin hint hive.input.format org.apache.hadoop.hive.ql.io.CombineHiveInputFormat The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat. hive.default.rcfile.serde org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe The default SerDe hive will use for the rcfile format