Diffstat (limited to 'lib/hadoop-0.20.0/contrib')
-rw-r--r--  lib/hadoop-0.20.0/contrib/capacity-scheduler/hadoop-0.20.0-capacity-scheduler.jar  bin 51224 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/datajoin/hadoop-0.20.0-datajoin.jar  bin 12667 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/eclipse-plugin/hadoop-0.20.0-eclipse-plugin.jar  bin 3009728 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/fairscheduler/hadoop-0.20.0-fairscheduler.jar  bin 37087 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/README  30
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy  170
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-config.sh  67
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh  141
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh  34
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh  68
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/start-hdfsproxy.sh  37
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/hdfsproxy/bin/stop-hdfsproxy.sh  28
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/build.xml  183
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/configuration.xsl  24
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-default.xml  59
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh  44
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template  44
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-hosts  1
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/log4j.properties  61
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-certs.xml  26
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-permissions.xml  28
-rw-r--r--  lib/hadoop-0.20.0/contrib/hdfsproxy/hdfsproxy-1.0.jar  bin 21572 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/index/hadoop-0.20.0-index.jar  bin 63178 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar  bin 68304 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/contrib/thriftfs/hadoop-0.20.0-thriftfs.jar  bin 10434 -> 0 bytes
-rwxr-xr-x  lib/hadoop-0.20.0/contrib/vaidya/bin/vaidya.sh  47
-rw-r--r--  lib/hadoop-0.20.0/contrib/vaidya/conf/postex_diagnosis_tests.xml  104
-rw-r--r--  lib/hadoop-0.20.0/contrib/vaidya/hadoop-0.20.0-vaidya.jar  bin 42201 -> 0 bytes
28 files changed, 0 insertions, 1196 deletions
diff --git a/lib/hadoop-0.20.0/contrib/capacity-scheduler/hadoop-0.20.0-capacity-scheduler.jar b/lib/hadoop-0.20.0/contrib/capacity-scheduler/hadoop-0.20.0-capacity-scheduler.jar
deleted file mode 100644
index b4900e565e..0000000000
--- a/lib/hadoop-0.20.0/contrib/capacity-scheduler/hadoop-0.20.0-capacity-scheduler.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/datajoin/hadoop-0.20.0-datajoin.jar b/lib/hadoop-0.20.0/contrib/datajoin/hadoop-0.20.0-datajoin.jar
deleted file mode 100644
index 21294d4d1d..0000000000
--- a/lib/hadoop-0.20.0/contrib/datajoin/hadoop-0.20.0-datajoin.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/eclipse-plugin/hadoop-0.20.0-eclipse-plugin.jar b/lib/hadoop-0.20.0/contrib/eclipse-plugin/hadoop-0.20.0-eclipse-plugin.jar
deleted file mode 100644
index 7b316393f6..0000000000
--- a/lib/hadoop-0.20.0/contrib/eclipse-plugin/hadoop-0.20.0-eclipse-plugin.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/fairscheduler/hadoop-0.20.0-fairscheduler.jar b/lib/hadoop-0.20.0/contrib/fairscheduler/hadoop-0.20.0-fairscheduler.jar
deleted file mode 100644
index 758b98367c..0000000000
--- a/lib/hadoop-0.20.0/contrib/fairscheduler/hadoop-0.20.0-fairscheduler.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/README b/lib/hadoop-0.20.0/contrib/hdfsproxy/README
deleted file mode 100644
index 2c33988926..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/README
+++ /dev/null
@@ -1,30 +0,0 @@
-HDFSPROXY is an HTTPS proxy server that exposes the same HSFTP interface as a
-real cluster. It authenticates users via user certificates and enforces access
-control based on configuration files.
-
-Starting up an HDFSPROXY server is similar to starting up an HDFS cluster.
-Simply run the "hdfsproxy" shell command. The main configuration file is
-hdfsproxy-default.xml, which should be on the classpath. hdfsproxy-env.sh
-can be used to set up environment variables. In particular, JAVA_HOME should
-be set. Additional configuration files include user-certs.xml,
-user-permissions.xml and ssl-server.xml, which are used to specify allowed user
-certs, allowed directories/files, and ssl keystore information for the proxy,
-respectively. The location of these files can be specified in
-hdfsproxy-default.xml. The environment variable HDFSPROXY_CONF_DIR can be used
-to point to the directory where these configuration files are located. The
-configuration files of the proxied HDFS cluster should also be available on the
-classpath (hdfs-default.xml and hdfs-site.xml).
-
-Mirroring those used in HDFS, a few shell scripts are provided to start and
-stop a group of proxy servers. The hosts to run hdfsproxy on are specified in
-the hdfsproxy-hosts file, one host per line. All hdfsproxy servers are
-stateless and run independently of each other. Simple load balancing can be
-set up by mapping all hdfsproxy server IP addresses to a single hostname.
-Users should use that hostname to access the proxy. If an IP address lookup
-for that hostname returns more than one IP address, an HFTP/HSFTP client will
-randomly pick one to use.
-
-Command "hdfsproxy -reloadPermFiles" can be used to trigger reloading of
-user-certs.xml and user-permissions.xml files on all proxy servers listed in
-the hdfsproxy-hosts file. Similarly, "hdfsproxy -clearUgiCache" command can be
-used to clear the UGI caches on all proxy servers.
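For orientation, the commands described in this README compose as follows; a minimal sketch, assuming HDFSPROXY_CONF_DIR points at a directory containing the files named above (the /etc/hdfsproxy path is an arbitrary example):

    export HDFSPROXY_CONF_DIR=/etc/hdfsproxy   # example location
    bin/start-hdfsproxy.sh                     # start on every host in hdfsproxy-hosts

    # after editing user-certs.xml or user-permissions.xml:
    bin/hdfsproxy -reloadPermFiles             # reload on all listed proxies
    bin/hdfsproxy -clearUgiCache               # clear UGI caches on all proxies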
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy
deleted file mode 100755
index 1b1e597891..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# The HdfsProxy command script
-#
-# Environment Variables
-#
-# JAVA_HOME The java implementation to use. Required.
-#
-# HDFSPROXY_CLASSPATH Extra Java CLASSPATH entries.
-#
-# HDFSPROXY_HEAPSIZE The maximum amount of heap to use, in MB.
-# Default is 1000.
-#
-# HDFSPROXY_OPTS Extra Java runtime options.
-#
-# HDFSPROXY_NAMENODE_OPTS These options are added to HDFSPROXY_OPTS
-# HDFSPROXY_CLIENT_OPTS when the respective command is run.
-# HDFSPROXY_{COMMAND}_OPTS etc A per-command variant may apply to more
-# than one command.
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-#
-# HDFSPROXY_ROOT_LOGGER The root appender. Default is INFO,console
-#
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-# some Java parameters: JAVA_HOME is required
-if [ "$JAVA_HOME" = "" ]; then
-  echo "Error: JAVA_HOME is not set."
-  exit 1
-fi
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m
-
-# check envvars which might override default args
-if [ "$HDFSPROXY_HEAPSIZE" != "" ]; then
- #echo "run with heapsize $HDFSPROXY_HEAPSIZE"
- JAVA_HEAP_MAX="-Xmx""$HDFSPROXY_HEAPSIZE""m"
- #echo $JAVA_HEAP_MAX
-fi
-
-# CLASSPATH initially contains $HDFSPROXY_CONF_DIR
-CLASSPATH="${HDFSPROXY_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-# for developers, add HdfsProxy classes to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/classes
-fi
-if [ -d "$HDFSPROXY_HOME/build/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build
-fi
-if [ -d "$HDFSPROXY_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/classes
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add hdfsproxy jar & webapps to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME
-fi
-for f in $HDFSPROXY_HOME/hdfsproxy-*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/lib" ]; then
- for f in $HDFSPROXY_HOME/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-
-if [ -d "$HDFSPROXY_HOME/../../" ]; then
- for f in $HDFSPROXY_HOME/../../*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-if [ -d "$HDFSPROXY_HOME/../../lib" ]; then
- for f in $HDFSPROXY_HOME/../../lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-if [ -d "$HDFSPROXY_HOME/../../lib/jsp-2.1" ]; then
- for f in $HDFSPROXY_HOME/../../lib/jsp-2.1/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-
-
-# add user-specified CLASSPATH last
-if [ "$HDFSPROXY_CLASSPATH" != "" ]; then
- CLASSPATH=${CLASSPATH}:${HDFSPROXY_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
- HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
-fi
-if [ "$HDFSPROXY_LOGFILE" = "" ]; then
- HDFSPROXY_LOGFILE='hdfsproxy.log'
-fi
-
-# restore ordinary behaviour
-unset IFS
-
-# figure out which class to run
-CLASS='org.apache.hadoop.hdfsproxy.HdfsProxy'
-
-# cygwin path translation
-if $cygwin; then
- CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HDFSPROXY_HOME=`cygpath -d "$HDFSPROXY_HOME"`
- HDFSPROXY_LOG_DIR=`cygpath -d "$HDFSPROXY_LOG_DIR"`
-fi
-
-# cygwin path translation
-if $cygwin; then
- JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.dir=$HDFSPROXY_LOG_DIR"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.file=$HDFSPROXY_LOGFILE"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.home.dir=$HDFSPROXY_HOME"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.id.str=$HDFSPROXY_IDENT_STRING"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.root.logger=${HDFSPROXY_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi
-
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HDFSPROXY_OPTS -classpath "$CLASSPATH" $CLASS "$@"
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-config.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-config.sh
deleted file mode 100755
index 8fe6aac68b..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-config.sh
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# included in all the hadoop scripts with source command
-# should not be executable directly
-# also should not be passed any arguments, since we need original $*
-
-# resolve links - $0 may be a softlink
-
-this="$0"
-while [ -h "$this" ]; do
- ls=`ls -ld "$this"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '.*/.*' > /dev/null; then
- this="$link"
- else
- this=`dirname "$this"`/"$link"
- fi
-done
-
-# convert relative path to absolute path
-bin=`dirname "$this"`
-script=`basename "$this"`
-bin=`cd "$bin"; pwd`
-this="$bin/$script"
-
-# the root of the HdfsProxy installation
-export HDFSPROXY_HOME=`dirname "$this"`/..
-
-#check to see if the conf dir is given as an optional argument
-if [ $# -gt 1 ]
-then
- if [ "--config" = "$1" ]
- then
- shift
- confdir=$1
- shift
- HDFSPROXY_CONF_DIR=$confdir
- fi
-fi
-
-# Allow alternate conf dir location.
-HDFSPROXY_CONF_DIR="${HDFSPROXY_CONF_DIR:-$HDFSPROXY_HOME/conf}"
-
-#check to see it is specified whether to use the slaves file
-if [ $# -gt 1 ]
-then
- if [ "--hosts" = "$1" ]
- then
- shift
- slavesfile=$1
- shift
- export HDFSPROXY_SLAVES="${HDFSPROXY_CONF_DIR}/$slavesfile"
- fi
-fi
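hdfsproxy-config.sh is sourced rather than executed, and it optionally consumes leading --config and --hosts arguments before the caller's own. A sketch of the pattern the sibling scripts use:

    bin=`dirname "$0"`
    bin=`cd "$bin"; pwd`
    . "$bin"/hdfsproxy-config.sh          # may shift off --config <dir> / --hosts <file>
    echo "conf dir: $HDFSPROXY_CONF_DIR"  # resolved here, or defaulted to $HDFSPROXY_HOME/conf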
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
deleted file mode 100755
index 6d5a75247f..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Runs a HdfsProxy as a daemon.
-#
-# Environment Variables
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_LOG_DIR Where log files are stored. PWD by default.
-# HDFSPROXY_MASTER host:path where hdfsproxy code should be rsync'd from
-# HDFSPROXY_PID_DIR The pid files are stored. /tmp by default.
-# HDFSPROXY_IDENT_STRING A string representing this instance of hdfsproxy. $USER by default
-# HDFSPROXY_NICENESS The scheduling priority for daemons. Defaults to 0.
-##
-
-usage="Usage: hdfsproxy-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-startStop=$1
-shift
-
-hdfsproxy_rotate_log ()
-{
- log=$1;
- num=5;
- if [ -n "$2" ]; then
- num=$2
- fi
- if [ -f "$log" ]; then # rotate logs
- while [ $num -gt 1 ]; do
- prev=`expr $num - 1`
- [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
- num=$prev
- done
- mv "$log" "$log.$num";
- fi
-}
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-# get log directory
-if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
- export HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
-fi
-mkdir -p "$HDFSPROXY_LOG_DIR"
-
-if [ "$HDFSPROXY_PID_DIR" = "" ]; then
- HDFSPROXY_PID_DIR=/tmp
-fi
-
-if [ "$HDFSPROXY_IDENT_STRING" = "" ]; then
- export HDFSPROXY_IDENT_STRING="$USER"
-fi
-
-# some variables
-export HDFSPROXY_LOGFILE=hdfsproxy-$HDFSPROXY_IDENT_STRING-$HOSTNAME.log
-export HDFSPROXY_ROOT_LOGGER="INFO,DRFA"
-log=$HDFSPROXY_LOG_DIR/hdfsproxy-$HDFSPROXY_IDENT_STRING-$HOSTNAME.out
-pid=$HDFSPROXY_PID_DIR/hdfsproxy-$HDFSPROXY_IDENT_STRING.pid
-
-# Set default scheduling priority
-if [ "$HDFSPROXY_NICENESS" = "" ]; then
- export HDFSPROXY_NICENESS=0
-fi
-
-case $startStop in
-
- (start)
-
- mkdir -p "$HDFSPROXY_PID_DIR"
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo hdfsproxy running as process `cat $pid`. Stop it first.
- exit 1
- fi
- fi
-
- if [ "$HDFSPROXY_MASTER" != "" ]; then
- echo rsync from $HDFSPROXY_MASTER
- rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HDFSPROXY_MASTER/ "$HDFSPROXY_HOME"
- fi
-
- hdfsproxy_rotate_log $log
- echo starting hdfsproxy, logging to $log
- cd "$HDFSPROXY_HOME"
- nohup nice -n $HDFSPROXY_NICENESS "$HDFSPROXY_HOME"/bin/hdfsproxy --config $HDFSPROXY_CONF_DIR "$@" > "$log" 2>&1 < /dev/null &
- echo $! > $pid
- sleep 1; head "$log"
- ;;
-
- (stop)
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo stopping hdfsproxy
- kill `cat $pid`
- else
- echo no hdfsproxy to stop
- fi
- else
- echo no hdfsproxy to stop
- fi
- ;;
-
- (*)
- echo $usage
- exit 1
- ;;
-
-esac
-
-
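The wrapper keeps one pid file per HDFSPROXY_IDENT_STRING (default $USER) and rotates the previous .out log on each start. A start/stop round trip, with /etc/hdfsproxy as an example conf dir:

    bin/hdfsproxy-daemon.sh --config /etc/hdfsproxy start
    cat /tmp/hdfsproxy-$USER.pid          # HDFSPROXY_PID_DIR defaults to /tmp
    bin/hdfsproxy-daemon.sh --config /etc/hdfsproxy stop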
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
deleted file mode 100755
index 7dd8568a3b..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a HdfsProxy command on all slave hosts.
-
-usage="Usage: hdfsproxy-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. $bin/hdfsproxy-config.sh
-
-exec "$bin/hdfsproxy-slaves.sh" --config $HDFSPROXY_CONF_DIR cd "$HDFSPROXY_HOME" \; "$bin/hdfsproxy-daemon.sh" --config $HDFSPROXY_CONF_DIR "$@"
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
deleted file mode 100755
index db54bd5b38..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a shell command on all slave hosts.
-#
-# Environment Variables
-#
-# HDFSPROXY_SLAVES File naming remote hosts.
-# Default is ${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts.
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
-# HDFSPROXY_SSH_OPTS Options passed to ssh when running remote commands.
-##
-
-usage="Usage: hdfsproxy-slaves.sh [--config confdir] command..."
-
-# if no args specified, show usage
-if [ $# -le 0 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# If the slaves file is specified in the command line,
-# then it takes precedence over the definition in
-# hdfsproxy-env.sh. Save it here.
-HOSTLIST=$HDFSPROXY_SLAVES
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-if [ "$HOSTLIST" = "" ]; then
- if [ "$HDFSPROXY_SLAVES" = "" ]; then
- export HOSTLIST="${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts"
- else
- export HOSTLIST="${HDFSPROXY_SLAVES}"
- fi
-fi
-
-for slave in `cat "$HOSTLIST"`; do
- ssh $HDFSPROXY_SSH_OPTS $slave $"${@// /\\ }" \
- 2>&1 | sed "s/^/$slave: /" &
- if [ "$HDFSPROXY_SLAVE_SLEEP" != "" ]; then
- sleep $HDFSPROXY_SLAVE_SLEEP
- fi
-done
-
-wait
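Since the loop simply ssh-es whatever command it is handed, the script doubles as a generic fan-out tool. A sketch, assuming passwordless ssh to every host in hdfsproxy-hosts:

    export HDFSPROXY_SSH_OPTS="-o ConnectTimeout=1"   # passed straight to ssh
    export HDFSPROXY_SLAVE_SLEEP=0.1                  # optional inter-spawn delay, in seconds
    bin/hdfsproxy-slaves.sh uptime                    # output comes back prefixed "host: "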
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/start-hdfsproxy.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/start-hdfsproxy.sh
deleted file mode 100755
index 2592d9c8cc..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/start-hdfsproxy.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Start hdfsproxy daemons.
-# Run this on master node.
-
-usage="Usage: start-hdfsproxy.sh"
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-if [ $# -ge 1 ]; then
- echo $usage
- exit 1
-fi
-
-# start hdfsproxy daemons
-# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR start
-"$bin"/hdfsproxy-daemons.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts start
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/stop-hdfsproxy.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
deleted file mode 100755
index 78089e31cf..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Stop hdfsproxy daemons. Run this on master node.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR stop
-"$bin"/hdfsproxy-daemons.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts stop
-
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/build.xml b/lib/hadoop-0.20.0/contrib/hdfsproxy/build.xml
deleted file mode 100644
index e62b2f279a..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/build.xml
+++ /dev/null
@@ -1,183 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project name="hdfsproxy" default="jar">
- <property name="hdfsproxyVersion" value="1.0"/>
- <property name="final.name" value="${ant.project.name}-${hdfsproxyVersion}"/>
- <property name="bin.dir" value="${basedir}/bin"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="conf.dir" value="${basedir}/conf"/>
- <property name="docs.dir" value="${basedir}/docs"/>
- <import file="../build-contrib.xml"/>
-
- <target name="jar" depends="compile" description="Create jar">
- <echo>
- Building the .jar files.
- </echo>
- <jar jarfile="${build.dir}/${final.name}.jar" basedir="${build.classes}" includes="org/apache/hadoop/hdfsproxy/**/*.class" >
- <manifest>
- <section name="org/apache/hadoop/hdfsproxy">
- <attribute name="Implementation-Title" value="HdfsProxy"/>
- <attribute name="Implementation-Version" value="${hdfsproxyVersion}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
-
- </jar>
- </target>
-
- <!-- ====================================================== -->
- <!-- Macro definitions -->
- <!-- ====================================================== -->
- <macrodef name="macro_tar" description="Worker Macro for tar">
- <attribute name="param.destfile"/>
- <element name="param.listofitems"/>
- <sequential>
- <tar compression="gzip" longfile="gnu"
- destfile="@{param.destfile}">
- <param.listofitems/>
- </tar>
- </sequential>
- </macrodef>
-
- <!-- ================================================================== -->
- <!-- D I S T R I B U T I O N -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="local-package" depends="jar" description="Package in local build directory">
- <mkdir dir="${build.dir}/${final.name}"/>
- <mkdir dir="${build.dir}/${final.name}/logs"/>
- <copy todir="${build.dir}/${final.name}" includeEmptyDirs="false">
- <fileset dir="${build.dir}">
- <include name="*.jar" />
- <include name="*.war" />
- </fileset>
- </copy>
- <copy todir="${build.dir}/${final.name}/lib" includeEmptyDirs="false">
- <fileset dir="${common.ivy.lib.dir}">
- <include name="commons-logging-${commons-logging.version}"/>
- <include name="commons-logging-api-${commons-logging-api.version}.jar"/>
- <include name="junit-${junit.version}.jar"/>
- <include name="log4j-${log4j.version}.jar"/>
- <include name="slf4j-api-${slf4j-api.version}.jar"/>
- <include name="slf4j-log4j${slf4j-log4j.version}.jar"/>
- <include name="xmlenc-${xmlenc.version}.jar"/>
- <include name="jetty-${jetty.version}.jar"/>
- <include name="servlet-api-${servlet-api-2.5.version}.jar"/>
- <include name="core-${core.vesion}"/>
- </fileset>
- <fileset dir="${hadoop.root}/lib/jsp-${jsp.version}">
- <include name="jsp-${jsp.version}"/>
- <include name="jsp-api-${jsp-api.vesion}"/>
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/lib" includeEmptyDirs="false">
- <fileset dir="${hadoop.root}/build">
- <include name="*-core.jar"/>
- <include name="*-tools.jar"/>
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/bin">
- <fileset dir="${bin.dir}"/>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/conf">
- <fileset dir="${conf.dir}"/>
- </copy>
-
- <copy todir="${build.dir}/${final.name}">
- <fileset dir="${basedir}">
- <include name="README" />
- <include name="build.xml" />
- <include name="*.txt" />
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/src" includeEmptyDirs="true">
- <fileset dir="${src.dir}" excludes="**/*.template **/docs/build/**/*"/>
- </copy>
-
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${build.dir}/${final.name}/bin"/>
- </chmod>
-
- </target>
-
- <target name="package" depends="local-package" description="Build distribution">
- <mkdir dir="${dist.dir}/contrib/${name}"/>
- <copy todir="${dist.dir}/contrib/${name}">
- <fileset dir="${build.dir}/${final.name}">
- <exclude name="**/lib/**" />
- <exclude name="**/src/**" />
- </fileset>
- </copy>
- <chmod dir="${dist.dir}/contrib/${name}/bin" perm="a+x" includes="*"/>
- </target>
-
- <!-- ================================================================== -->
- <!-- Make release tarball -->
- <!-- ================================================================== -->
- <target name="tar" depends="local-package" description="Make release tarball">
- <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
-
- <target name="binary" depends="local-package" description="Make tarball without source and documentation">
- <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
-
- <!-- the unit test classpath -->
- <path id="test.classpath">
- <pathelement location="${build.test}" />
- <pathelement location="${hadoop.root}/build/test/classes"/>
- <pathelement location="${hadoop.root}/src/contrib/test"/>
- <pathelement location="${hadoop.root}/conf"/>
- <pathelement location="${hadoop.root}/build"/>
- <pathelement location="${hadoop.root}/build/classes"/>
- <pathelement location="${hadoop.root}/build/tools"/>
- <pathelement location="${build.examples}"/>
- <path refid="contrib-classpath"/>
- </path>
-
-
-</project>
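For context, the targets above chain as jar -> local-package -> package/tar/binary. Typical invocations, as a sketch; build-contrib.xml is imported from the parent directory, so this has to run inside a full Hadoop source tree:

    cd src/contrib/hdfsproxy   # assumed location within the Hadoop tree
    ant jar                    # compile and build hdfsproxy-1.0.jar
    ant tar                    # release tarball
    ant binary                 # tarball without src and docs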
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/configuration.xsl b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/configuration.xsl
deleted file mode 100644
index 377cdbeb93..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/configuration.xsl
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0"?>
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-<xsl:output method="html"/>
-<xsl:template match="configuration">
-<html>
-<body>
-<table border="1">
-<tr>
- <td>name</td>
- <td>value</td>
- <td>description</td>
-</tr>
-<xsl:for-each select="property">
-<tr>
- <td><a name="{name}"><xsl:value-of select="name"/></a></td>
- <td><xsl:value-of select="value"/></td>
- <td><xsl:value-of select="description"/></td>
-</tr>
-</xsl:for-each>
-</table>
-</body>
-</html>
-</xsl:template>
-</xsl:stylesheet>
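The stylesheet renders a configuration file as an HTML table of name/value/description rows; the conf XML files reference it via their xml-stylesheet declaration. Any XSLT 1.0 processor can apply it directly, e.g. with xsltproc (output path arbitrary):

    xsltproc conf/configuration.xsl conf/hdfsproxy-default.xml > hdfsproxy-conf.html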
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-default.xml b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-default.xml
deleted file mode 100644
index 0d2a006c8e..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-default.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put hdfsproxy specific properties in this file. -->
-
-<configuration>
-
-<property>
- <name>hdfsproxy.https.address</name>
- <value>0.0.0.0:50479</value>
- <description>the SSL port that hdfsproxy listens on
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.hosts</name>
- <value>hdfsproxy-hosts</value>
- <description>location of hdfsproxy-hosts file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.dfs.namenode.address</name>
- <value></value>
- <description>namenode address of the HDFS cluster being proxied
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.https.server.keystore.resource</name>
- <value>ssl-server.xml</value>
- <description>location of the resource from which ssl server keystore
- information will be extracted
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.user.permissions.file.location</name>
- <value>user-permissions.xml</value>
- <description>location of the user permissions file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.user.certs.file.location</name>
- <value>user-certs.xml</value>
- <description>location of the user certs file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.ugi.cache.ugi.lifetime</name>
- <value>15</value>
- <description> The lifetime (in minutes) of a cached ugi
- </description>
-</property>
-
-</configuration>
-
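Note that hdfsproxy.dfs.namenode.address ships empty and must be filled in before the proxy can forward anything. A minimal sketch of a usable file, written via a heredoc; the namenode host and port are placeholders:

cat > "$HDFSPROXY_CONF_DIR/hdfsproxy-default.xml" <<'EOF'
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>hdfsproxy.https.address</name>
    <value>0.0.0.0:50479</value>
  </property>
  <property>
    <name>hdfsproxy.dfs.namenode.address</name>
    <value>namenode.example.com:8020</value> <!-- placeholder -->
  </property>
</configuration>
EOF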
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh
deleted file mode 100644
index a0ff7a5d27..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-# Set HdfsProxy-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME. All others are
-# optional. When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use. Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-
-# Extra Java CLASSPATH elements. Optional.
-# export HDFSPROXY_CLASSPATH=
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HDFSPROXY_HEAPSIZE=2000
-
-# Extra Java runtime options. Empty by default.
-# export HDFSPROXY_OPTS=
-
-# Extra ssh options. Empty by default.
-# export HDFSPROXY_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HDFSPROXY_CONF_DIR"
-
-# Where log files are stored. $HDFSPROXY_HOME/logs by default.
-# export HDFSPROXY_LOG_DIR=${HDFSPROXY_HOME}/logs
-
-# File naming remote slave hosts. $HDFSPROXY_HOME/conf/slaves by default.
-# export HDFSPROXY_SLAVES=${HDFSPROXY_HOME}/conf/slaves
-
-# host:path where hdfsproxy code should be rsync'd from. Unset by default.
-# export HDFSPROXY_MASTER=master:/home/$USER/src/hdfsproxy
-
-# Seconds to sleep between slave commands. Unset by default. This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HDFSPROXY_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. /tmp by default.
-# export HDFSPROXY_PID_DIR=/var/hdfsproxy/pids
-
-# A string representing this instance of hdfsproxy. $USER by default.
-# export HDFSPROXY_IDENT_STRING=$USER
-
-# The scheduling priority for daemon processes. See 'man nice'.
-# export HDFSPROXY_NICENESS=10
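Everything in this file ships commented out; only JAVA_HOME is strictly required. A minimal uncommented sketch (the paths are examples, not recommendations):

    export JAVA_HOME=/usr/lib/j2sdk1.5-sun
    export HDFSPROXY_HEAPSIZE=2000
    export HDFSPROXY_LOG_DIR=/var/log/hdfsproxy
    export HDFSPROXY_PID_DIR=/var/run/hdfsproxy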
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template
deleted file mode 100644
index a0ff7a5d27..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template
+++ /dev/null
@@ -1,44 +0,0 @@
-# Set HdfsProxy-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME. All others are
-# optional. When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use. Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-
-# Extra Java CLASSPATH elements. Optional.
-# export HDFSPROXY_CLASSPATH=
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HDFSPROXY_HEAPSIZE=2000
-
-# Extra Java runtime options. Empty by default.
-# export HDFSPROXY_OPTS=
-
-# Extra ssh options. Empty by default.
-# export HDFSPROXY_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HDFSPROXY_CONF_DIR"
-
-# Where log files are stored. $HDFSPROXY_HOME/logs by default.
-# export HDFSPROXY_LOG_DIR=${HDFSPROXY_HOME}/logs
-
-# File naming remote slave hosts. $HDFSPROXY_HOME/conf/slaves by default.
-# export HDFSPROXY_SLAVES=${HDFSPROXY_HOME}/conf/slaves
-
-# host:path where hdfsproxy code should be rsync'd from. Unset by default.
-# export HDFSPROXY_MASTER=master:/home/$USER/src/hdfsproxy
-
-# Seconds to sleep between slave commands. Unset by default. This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HDFSPROXY_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. /tmp by default.
-# export HDFSPROXY_PID_DIR=/var/hdfsproxy/pids
-
-# A string representing this instance of hdfsproxy. $USER by default.
-# export HDFSPROXY_IDENT_STRING=$USER
-
-# The scheduling priority for daemon processes. See 'man nice'.
-# export HDFSPROXY_NICENESS=10
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-hosts b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-hosts
deleted file mode 100644
index 2fbb50c4a8..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/hdfsproxy-hosts
+++ /dev/null
@@ -1 +0,0 @@
-localhost
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/log4j.properties b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/log4j.properties
deleted file mode 100644
index 2520ab3795..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/log4j.properties
+++ /dev/null
@@ -1,61 +0,0 @@
-# Define some default values that can be overridden by system properties
-hdfsproxy.root.logger=INFO,console
-hdfsproxy.log.dir=.
-hdfsproxy.log.file=hdfsproxy.log
-
-# Define the root logger to the system property "hdfsproxy.root.logger".
-log4j.rootLogger=${hdfsproxy.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
-
-# Logfile size and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.hdfsproxy.HttpsProxy=DEBUG
-#log4j.logger.org.apache.hadoop.hdfsproxy.ProxyFilter=DEBUG
-
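The daemon wrapper exports HDFSPROXY_ROOT_LOGGER=INFO,DRFA, while a foreground bin/hdfsproxy defaults to INFO,console. Either can be overridden per invocation without editing this file:

    HDFSPROXY_ROOT_LOGGER="INFO,DRFA" bin/hdfsproxy      # daily rolling file
    HDFSPROXY_ROOT_LOGGER="DEBUG,console" bin/hdfsproxy  # verbose console debugging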
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-certs.xml b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-certs.xml
deleted file mode 100644
index f572a55294..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-certs.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
-
-This file defines the mappings from username to a comma-separated list
-of certificate serial numbers that the user is allowed to use. One mapping
-per user. Wildcard characters, such as "*" and "?", are not recognized.
-Any leading or trailing whitespace is stripped/ignored. Note that user
-"Admin" is the special hdfsproxy admin user. To make a user an admin, add
-the user's certificate serial number to user "Admin". Normal users cannot
-have "Admin" as username. Usernames may contain only 0-9a-zA-Z and
-underscore.
-
--->
-
-<configuration>
-
-<property>
- <name>Admin</name>
- <value></value>
- <description> Special hdfsproxy admin user
- </description>
-</property>
-
-</configuration>
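A sketch of a populated file, written via a heredoc; the username and serial numbers are invented placeholders:

cat > "$HDFSPROXY_CONF_DIR/user-certs.xml" <<'EOF'
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>Admin</name>
    <value>1238d57b</value> <!-- placeholder serial -->
  </property>
  <property>
    <name>alice_1</name>
    <value>5ab21f3c,77e0aa12</value> <!-- placeholder serials -->
  </property>
</configuration>
EOF
bin/hdfsproxy -reloadPermFiles   # push the change to all proxies, per the README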
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-permissions.xml b/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-permissions.xml
deleted file mode 100644
index b7373751bd..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/conf/user-permissions.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
-
-This file defines the mappings from username to a comma-separated list
-of directories/files that the user is allowed to access. One mapping
-per user. Wildcard characters, such as "*" and "?", are not recognized.
-For example, to match the "/output" directory, one can use "/output" or
-"/output/", but not "/output/*". Any leading or trailing whitespace
-in the name field is stripped/ignored, while only leading whitespace
-in the value field is. Note that the special hdfsproxy admin user "Admin"
-doesn't automatically have access to any files, unless explicitly
-specified in this file. Usernames may contain only 0-9a-zA-Z and
-underscore.
-
--->
-
-<configuration>
-
-<property>
- <name></name>
- <value></value>
- <description>
- </description>
-</property>
-
-</configuration>
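And a matching sketch for this file, again with placeholder values:

cat > "$HDFSPROXY_CONF_DIR/user-permissions.xml" <<'EOF'
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>alice_1</name>
    <value>/data/public,/user/alice_1</value> <!-- placeholder paths -->
  </property>
</configuration>
EOF
bin/hdfsproxy -reloadPermFiles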
diff --git a/lib/hadoop-0.20.0/contrib/hdfsproxy/hdfsproxy-1.0.jar b/lib/hadoop-0.20.0/contrib/hdfsproxy/hdfsproxy-1.0.jar
deleted file mode 100644
index a313391dfb..0000000000
--- a/lib/hadoop-0.20.0/contrib/hdfsproxy/hdfsproxy-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/index/hadoop-0.20.0-index.jar b/lib/hadoop-0.20.0/contrib/index/hadoop-0.20.0-index.jar
deleted file mode 100644
index f1f850fcd3..0000000000
--- a/lib/hadoop-0.20.0/contrib/index/hadoop-0.20.0-index.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar b/lib/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar
deleted file mode 100644
index 84251e3a3c..0000000000
--- a/lib/hadoop-0.20.0/contrib/streaming/hadoop-0.20.0-streaming.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/thriftfs/hadoop-0.20.0-thriftfs.jar b/lib/hadoop-0.20.0/contrib/thriftfs/hadoop-0.20.0-thriftfs.jar
deleted file mode 100644
index bf10c05e1d..0000000000
--- a/lib/hadoop-0.20.0/contrib/thriftfs/hadoop-0.20.0-thriftfs.jar
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/contrib/vaidya/bin/vaidya.sh b/lib/hadoop-0.20.0/contrib/vaidya/bin/vaidya.sh
deleted file mode 100755
index ada6715342..0000000000
--- a/lib/hadoop-0.20.0/contrib/vaidya/bin/vaidya.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/sh
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-this="$0"
-while [ -h "$this" ]; do
- ls=`ls -ld "$this"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '.*/.*' > /dev/null; then
- this="$link"
- else
- this=`dirname "$this"`/"$link"
- fi
-done
-
-# convert relative path to absolute path
-bin=`dirname "$this"`
-script=`basename "$this"`
-bin=`cd "$bin"; pwd`
-this="$bin/$script"
-
-# Check if HADOOP_HOME and JAVA_HOME are set.
-if [ -z "$HADOOP_HOME" ] ; then
-  echo "HADOOP_HOME environment variable not defined"
-  exit 1
-fi
-
-if [ -z "$JAVA_HOME" ] ; then
-  echo "JAVA_HOME environment variable not defined"
-  exit 1
-fi
-
-hadoopVersion=`$HADOOP_HOME/bin/hadoop version | awk 'BEGIN { RS = "" ; FS = "\n" } ; { print $1 }' | awk '{print $2}'`
-
-$JAVA_HOME/bin/java -classpath $HADOOP_HOME/hadoop-${hadoopVersion}-core.jar:$HADOOP_HOME/contrib/vaidya/hadoop-${hadoopVersion}-vaidya.jar:$HADOOP_HOME/lib/commons-logging-1.0.4.jar:${CLASSPATH} org.apache.hadoop.vaidya.postexdiagnosis.PostExPerformanceDiagnoser $@
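The script forwards all of its arguments to PostExPerformanceDiagnoser. A hedged invocation sketch; the -jobconf/-joblog/-report option names follow the contemporary Vaidya documentation, and the paths are placeholders:

    export HADOOP_HOME=/opt/hadoop-0.20.0
    export JAVA_HOME=/usr/lib/j2sdk1.5-sun
    $HADOOP_HOME/contrib/vaidya/bin/vaidya.sh \
      -jobconf file:///var/log/history/job_200904210900_0001_conf.xml \
      -joblog  file:///var/log/history/job_200904210900_0001_user_wordcount \
      -report  /tmp/vaidya-report.xml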
diff --git a/lib/hadoop-0.20.0/contrib/vaidya/conf/postex_diagnosis_tests.xml b/lib/hadoop-0.20.0/contrib/vaidya/conf/postex_diagnosis_tests.xml
deleted file mode 100644
index f30d5d9cc8..0000000000
--- a/lib/hadoop-0.20.0/contrib/vaidya/conf/postex_diagnosis_tests.xml
+++ /dev/null
@@ -1,104 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
-**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-**
- -->
-<!-- This is a diagnostic test configuration file. The diagnostic test driver
- reads this file to get the list of tests and their configuration information.
-
- Title : Provides brief description of the test
- ClassName : Provides the fully qualified java class name that implements the test condition
- Description : Provides detailed information about the test describing how it checks for a specific
- performance problem.
- SuccessThreshold : (value between [0..1])
- : Evaluation of a diagnostic test returns its level of impact on the job
- performance. If the impact value (between 0 and 1) is equal to or greater
- than the success threshold, the rule has detected the problem (TEST
- POSITIVE); otherwise the rule has passed the test (TEST NEGATIVE). The
- impact level is calculated and returned by each test's evaluate method.
- For tests that are boolean in nature, the impact level is either 0 or 1
- and the success threshold should be 1.
- Importance : Indicates relative importance of this diagnostic test among the set of
- diagnostic rules defined in this file. Three declarative values that
- can be assigned are High, Medium or Low
- Prescription : This is an optional element to store the advice to be included in the report upon test failure
- This is overwritten in the report by any advice/prescription text returned by getPrescription method of
- DiagnosticTest.
- InputElement : Input element is made available to the diagnostic test for it to interpret and accept
- any parameters specific to the test. These test specific parameters are used to configure
- the tests without changing the java code.
--->
-<PostExPerformanceDiagnosisTests>
-
-<DiagnosticTest>
- <Title><![CDATA[Balanced Reduce Partitioning]]></Title>
- <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.BalancedReducePartitioning]]></ClassName>
- <Description><![CDATA[This rule tests how well the input to reduce tasks is balanced]]></Description>
- <Importance><![CDATA[High]]></Importance>
- <SuccessThreshold><![CDATA[0.20]]></SuccessThreshold>
- <Prescription><![CDATA[advice]]></Prescription>
- <InputElement>
- <PercentReduceRecords><![CDATA[0.85]]></PercentReduceRecords>
- </InputElement>
-</DiagnosticTest>
-
-<DiagnosticTest>
- <Title><![CDATA[Impact of Map tasks Re-Execution]]></Title>
- <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.MapsReExecutionImpact]]></ClassName>
- <Description><![CDATA[This test rule checks the percentage of map task re-executions and their impact on the job performance]]></Description>
- <Importance><![CDATA[High]]></Importance>
- <SuccessThreshold><![CDATA[0.40]]></SuccessThreshold>
- <Prescription><![CDATA[default advice]]></Prescription>
- <InputElement>
- </InputElement>
-</DiagnosticTest>
-
-<DiagnosticTest>
- <Title><![CDATA[Impact of Reduce tasks Re-Execution]]></Title>
- <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.ReducesReExecutionImpact]]></ClassName>
- <Description><![CDATA[This test rule checks the percentage of reduce task re-executions and their impact on the job performance]]></Description>
- <Importance><![CDATA[High]]></Importance>
- <SuccessThreshold><![CDATA[0.40]]></SuccessThreshold>
- <Prescription><![CDATA[default advice]]></Prescription>
- <InputElement>
- </InputElement>
-</DiagnosticTest>
-
-<DiagnosticTest>
- <Title><![CDATA[Map and/or Reduce tasks reading HDFS data as a side effect]]></Title>
- <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.ReadingHDFSFilesAsSideEffect]]></ClassName>
- <Description><![CDATA[This test rule checks if map/reduce tasks are reading data from HDFS as a side effect. The more data is read as a side effect, the more likely it is to become a bottleneck across the parallel execution of map/reduce tasks.]]></Description>
- <Importance><![CDATA[High]]></Importance>
- <SuccessThreshold><![CDATA[0.05]]></SuccessThreshold>
- <Prescription><![CDATA[default advice]]></Prescription>
- <InputElement>
- </InputElement>
-</DiagnosticTest>
-
-<DiagnosticTest>
- <Title><![CDATA[Map side disk spill]]></Title>
- <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.MapSideDiskSpill]]></ClassName>
- <Description><![CDATA[This test rule checks if Map tasks are spilling data to the local disk during the map-side sort due to insufficient sort buffer size. The impact is calculated as the ratio of local bytes written to map output bytes. The impact is normalized using the NormalizationFactor given below; any value greater than or equal to the normalization factor is treated as the maximum (i.e. 1). ]]></Description>
- <Importance><![CDATA[Low]]></Importance>
- <SuccessThreshold><![CDATA[0.3]]></SuccessThreshold>
- <Prescription><![CDATA[default advice]]></Prescription>
- <InputElement>
- <NormalizationFactor>3.0</NormalizationFactor>
- </InputElement>
-</DiagnosticTest>
-
-</PostExPerformanceDiagnosisTests>
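Because the driver discovers rules purely from this file, tightening or adding a test is a configuration change. A sketch that copies one shipped rule with a stricter threshold into a private test file (threshold and input values chosen arbitrarily), which per the Vaidya documentation would then be passed via -testconf:

cat > /tmp/my_tests.xml <<'EOF'
<?xml version="1.0" encoding="ISO-8859-1"?>
<PostExPerformanceDiagnosisTests>
<DiagnosticTest>
 <Title><![CDATA[Balanced Reduce Partitioning (strict)]]></Title>
 <ClassName><![CDATA[org.apache.hadoop.vaidya.postexdiagnosis.tests.BalancedReducePartitioning]]></ClassName>
 <Description><![CDATA[Stricter variant of the shipped balance rule]]></Description>
 <Importance><![CDATA[High]]></Importance>
 <SuccessThreshold><![CDATA[0.10]]></SuccessThreshold>
 <Prescription><![CDATA[advice]]></Prescription>
 <InputElement>
  <PercentReduceRecords><![CDATA[0.95]]></PercentReduceRecords>
 </InputElement>
</DiagnosticTest>
</PostExPerformanceDiagnosisTests>
EOF
# then add "-testconf /tmp/my_tests.xml" to the vaidya.sh invocation shown earlier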
diff --git a/lib/hadoop-0.20.0/contrib/vaidya/hadoop-0.20.0-vaidya.jar b/lib/hadoop-0.20.0/contrib/vaidya/hadoop-0.20.0-vaidya.jar
deleted file mode 100644
index 534b18d974..0000000000
--- a/lib/hadoop-0.20.0/contrib/vaidya/hadoop-0.20.0-vaidya.jar
+++ /dev/null
Binary files differ