Skip to content

Commit dab8d48

Browse files
committed
HDFS-1963. Create RPM and Debian packages for HDFS. Changes deployment
layout to be consistent across the binary tgz, rpm, and deb. (Eric Yang via omalley) git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@1128393 13f79535-47bb-0310-9956-ffa450edef68
1 parent a8cacc6 commit dab8d48

36 files changed

+1250
-127
lines changed

CHANGES.txt

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -101,8 +101,8 @@ Trunk (unreleased changes)
101101
HDFS-1663. Federation: Rename getPoolId() everywhere to
102102
getBlockPoolId() (tanping via boryas)
103103

104-
HDFS-1652. Federation: Add support for multiple namenodes in MiniDFSCluster.
105-
(suresh)
104+
HDFS-1652. Federation: Add support for multiple namenodes in
105+
MiniDFSCluster. (suresh)
106106

107107
HDFS-1672. Federation: refactor stopDatanode(name) to work
108108
with multiple Block Pools (boryas)
@@ -243,8 +243,8 @@ Trunk (unreleased changes)
243243

244244
HDFS-1754. Federation: testFsck fails. (boryas)
245245

246-
HDFS-1755. Federation: The BPOfferService must always connect to namenode as
247-
the login user. (jitendra)
246+
HDFS-1755. Federation: The BPOfferService must always connect to namenode
247+
as the login user. (jitendra)
248248

249249
HDFS-1675. Support transferring RBW between datanodes. (szetszwo)
250250

@@ -281,6 +281,9 @@ Trunk (unreleased changes)
281281
HDFS-1914. Federation: namenode storage directories must be configurable
282282
specific to name service. (suresh)
283283

284+
HDFS-1963. Create RPM and Debian packages for HDFS. Changes deployment
285+
layout to be consistent across the binary tgz, rpm, and deb.
286+
(Eric Yang via omalley)
284287

285288
IMPROVEMENTS
286289

bin/distribute-exclude.sh

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
bin=`dirname "$0"`
3737
bin=`cd "$bin"; pwd`
3838

39-
. "$bin/hdfs-config.sh"
39+
. "$bin/../libexec/hdfs-config.sh"
4040

4141
if [ "$1" = '' ] ; then
4242
"Error: please specify local exclude file as a first argument"
@@ -50,8 +50,8 @@ if [ ! -f "$excludeFilenameLocal" ] ; then
5050
exit 1
5151
fi
5252

53-
namenodes=$("$HADOOP_HDFS_HOME/bin/hdfs" getconf -namenodes)
54-
excludeFilenameRemote=$("$HADOOP_HDFS_HOME/bin/hdfs" getconf -excludeFile)
53+
namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -namenodes)
54+
excludeFilenameRemote=$("$HADOOP_PREFIX/bin/hdfs" getconf -excludeFile)
5555

5656
if [ "$excludeFilenameRemote" = '' ] ; then
5757
echo \

bin/hdfs

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -15,10 +15,11 @@
1515
# See the License for the specific language governing permissions and
1616
# limitations under the License.
1717

18-
bin=`dirname "$0"`
18+
bin=`which $0`
19+
bin=`dirname ${bin}`
1920
bin=`cd "$bin"; pwd`
2021

21-
. "$bin"/hdfs-config.sh
22+
. "$bin"/../libexec/hdfs-config.sh
2223

2324
function print_usage(){
2425
echo "Usage: hdfs [--config confdir] COMMAND"
@@ -107,29 +108,29 @@ else
107108
fi
108109

109110
# for developers, add hdfs classes to CLASSPATH
110-
if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
111-
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
111+
if [ -d "$HADOOP_PREFIX/build/classes" ]; then
112+
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
112113
fi
113-
if [ -d "$HADOOP_HDFS_HOME/build/webapps" ]; then
114-
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build
114+
if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
115+
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
115116
fi
116-
if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
117-
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
117+
if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
118+
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
118119
fi
119-
if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
120-
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
120+
if [ -d "$HADOOP_PREFIX/build/tools" ]; then
121+
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/tools
121122
fi
122123

123124
# for releases, add core hdfs jar & webapps to CLASSPATH
124-
if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
125-
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
125+
if [ -d "$HADOOP_PREFIX/share/hadoop/hdfs/webapps" ]; then
126+
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/hdfs
126127
fi
127-
for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
128+
for f in $HADOOP_PREFIX/share/hadoop-hdfs/*.jar; do
128129
CLASSPATH=${CLASSPATH}:$f;
129130
done
130131

131132
# add libs to CLASSPATH
132-
for f in $HADOOP_HDFS_HOME/lib/*.jar; do
133+
for f in $HADOOP_PREFIX/lib/*.jar; do
133134
CLASSPATH=${CLASSPATH}:$f;
134135
done
135136

@@ -146,7 +147,7 @@ if [ "$starting_secure_dn" = "true" ]; then
146147
HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
147148
fi
148149

149-
exec "$HADOOP_HDFS_HOME/bin/jsvc" \
150+
exec "$HADOOP_PREFIX/bin/jsvc" \
150151
-Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
151152
-errfile "$HADOOP_LOG_DIR/jsvc.err" \
152153
-pidfile "$HADOOP_SECURE_DN_PID" \

bin/hdfs-config.sh

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,14 @@
1818
# included in all the hdfs scripts with source command
1919
# should not be executed directly
2020

21-
bin=`dirname "$0"`
21+
bin=`which "$0"`
22+
bin=`dirname "${bin}"`
2223
bin=`cd "$bin"; pwd`
2324

24-
export HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$bin/..}"
25+
export HADOOP_PREFIX="${HADOOP_PREFIX:-$bin/..}"
2526

26-
if [ -d "${HADOOP_COMMON_HOME}" ]; then
27-
. "$HADOOP_COMMON_HOME"/bin/hadoop-config.sh
28-
elif [ -d "${HADOOP_HOME}" ]; then
29-
. "$HADOOP_HOME"/bin/hadoop-config.sh
27+
if [ -d "$bin" ]; then
28+
. $bin/../libexec/hadoop-config.sh
3029
elif [ -e "${HADOOP_HDFS_HOME}"/bin/hadoop-config.sh ]; then
3130
. "$HADOOP_HDFS_HOME"/bin/hadoop-config.sh
3231
else

bin/refresh-namenodes.sh

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,13 @@
2323
bin=`dirname "$0"`
2424
bin=`cd "$bin"; pwd`
2525

26-
. "$bin/hdfs-config.sh"
26+
. "$bin/../libexec/hdfs-config.sh"
2727

28-
namenodes=$("$HADOOP_HDFS_HOME/bin/hdfs" getconf -namenodes)
28+
namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -namenodes)
2929

3030
for namenode in $namenodes ; do
3131
echo "Refreshing namenode [$namenode]"
32-
"$HADOOP_HDFS_HOME/bin/hdfs" dfsadmin -refreshNodes
32+
"$HADOOP_PREFIX/bin/hdfs" dfsadmin -refreshNodes
3333
if [ "$?" != '0' ] ; then errorFlag='1' ; fi
3434
done
3535

bin/start-balancer.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@
1818
bin=`dirname "${BASH_SOURCE-$0}"`
1919
bin=`cd "$bin"; pwd`
2020

21-
. "$bin"/hdfs-config.sh
21+
. "$bin"/../libexec/hdfs-config.sh
2222

2323
# Start balancer daemon.
2424

25-
"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start balancer $@
25+
"$HADOOP_PREFIX"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start balancer $@

bin/start-dfs.sh

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ usage="Usage: start-dfs.sh [-upgrade|-rollback]"
2525
bin=`dirname "${BASH_SOURCE-$0}"`
2626
bin=`cd "$bin"; pwd`
2727

28-
. "$bin/hdfs-config.sh"
28+
. "$bin"/../libexec/hdfs-config.sh
2929

3030
# get arguments
3131
if [ $# -ge 1 ]; then
@@ -47,11 +47,11 @@ fi
4747
#---------------------------------------------------------
4848
# namenodes
4949

50-
NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -namenodes)
50+
NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
5151

5252
echo "Starting namenodes on [$NAMENODES]"
5353

54-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
54+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
5555
--config "$HADOOP_CONF_DIR" \
5656
--hostnames "$NAMENODES" \
5757
--script "$bin/hdfs" start namenode $nameStartOpt
@@ -64,7 +64,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
6464
"Attempting to start secure cluster, skipping datanodes. " \
6565
"Run start-secure-dns.sh as root to complete startup."
6666
else
67-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
67+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
6868
--config "$HADOOP_CONF_DIR" \
6969
--script "$bin/hdfs" start datanode $dataStartOpt
7070
fi
@@ -74,7 +74,7 @@ fi
7474

7575
# if there are no secondary namenodes configured it returns
7676
# 0.0.0.0 or empty string
77-
SECONDARY_NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -secondarynamenodes 2>&-)
77+
SECONDARY_NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -secondarynamenodes 2>&-)
7878
SECONDARY_NAMENODES=${SECONDARY_NAMENODES:='0.0.0.0'}
7979

8080
if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
@@ -84,7 +84,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
8484
else
8585
echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
8686

87-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
87+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
8888
--config "$HADOOP_CONF_DIR" \
8989
--hostnames "$SECONDARY_NAMENODES" \
9090
--script "$bin/hdfs" start secondarynamenode

bin/start-secure-dns.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,10 @@ usage="Usage (run as root in order to start secure datanodes): start-secure-dns.
2222
bin=`dirname "${BASH_SOURCE-$0}"`
2323
bin=`cd "$bin"; pwd`
2424

25-
. "$bin"/hdfs-config.sh
25+
. "$bin"/../libexec/hdfs-config.sh
2626

2727
if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
28-
"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
28+
"$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
2929
else
3030
echo $usage
3131
fi

bin/stop-balancer.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,9 @@
1818
bin=`dirname "${BASH_SOURCE-$0}"`
1919
bin=`cd "$bin"; pwd`
2020

21-
. "$bin"/hdfs-config.sh
21+
. "$bin"/../libexec/hdfs-config.sh
2222

2323
# Stop balancer daemon.
2424
# Run this on the machine where the balancer is running
2525

26-
"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop balancer
26+
"$HADOOP_PREFIX"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop balancer

bin/stop-dfs.sh

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,16 +18,16 @@
1818
bin=`dirname "${BASH_SOURCE-$0}"`
1919
bin=`cd "$bin"; pwd`
2020

21-
. "$bin"/hdfs-config.sh
21+
. "$bin"/../libexec/hdfs-config.sh
2222

2323
#---------------------------------------------------------
2424
# namenodes
2525

26-
NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -namenodes)
26+
NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
2727

2828
echo "Stopping namenodes on [$NAMENODES]"
2929

30-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
30+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
3131
--config "$HADOOP_CONF_DIR" \
3232
--hostnames "$NAMENODES" \
3333
--script "$bin/hdfs" stop namenode
@@ -40,7 +40,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
4040
"Attempting to stop secure cluster, skipping datanodes. " \
4141
"Run stop-secure-dns.sh as root to complete shutdown."
4242
else
43-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
43+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
4444
--config "$HADOOP_CONF_DIR" \
4545
--script "$bin/hdfs" stop datanode
4646
fi
@@ -50,7 +50,7 @@ fi
5050

5151
# if there are no secondary namenodes configured it returns
5252
# 0.0.0.0 or empty string
53-
SECONDARY_NAMENODES=$($HADOOP_HDFS_HOME/bin/hdfs getconf -secondarynamenodes 2>&-)
53+
SECONDARY_NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -secondarynamenodes 2>&-)
5454
SECONDARY_NAMENODES=${SECONDARY_NAMENODES:-'0.0.0.0'}
5555

5656
if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
@@ -60,7 +60,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
6060
else
6161
echo "Stopping secondary namenodes [$SECONDARY_NAMENODES]"
6262

63-
"$HADOOP_COMMON_HOME/bin/hadoop-daemons.sh" \
63+
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
6464
--config "$HADOOP_CONF_DIR" \
6565
--hostnames "$SECONDARY_NAMENODES" \
6666
--script "$bin/hdfs" stop secondarynamenode

0 commit comments

Comments
 (0)