Commit 5239fbb (1 parent: e944408)

Command hdfs dfs -rm -r can't remove empty directory. Contributed by Yongjun Zhang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1594036 13f79535-47bb-0310-9956-ffa450edef68

6 files changed: +334 -26 lines
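
Before the per-file diffs, the bug restated as a minimal repro sketch against the public FileSystem API. This is illustrative, not part of the patch: the path and permission values are assumptions, and it must run as a non-superuser (the superuser bypasses permission checks entirely).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class EmptyDirDeleteRepro {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS points at an HDFS cluster.
    FileSystem fs = FileSystem.get(new Configuration());
    Path dir = new Path("/tmp/parent/emptyDir");        // hypothetical path
    fs.mkdirs(dir);
    fs.setPermission(dir, new FsPermission((short) 0)); // dir itself: ---------

    // Equivalent of `hdfs dfs -rm -r /tmp/parent/emptyDir`. Before this
    // commit the recursive-delete check required FsAction.ALL on the empty
    // directory itself and failed; with the fix, write access to the
    // parent is enough and the call succeeds.
    System.out.println("deleted: " + fs.delete(dir, true));
  }
}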

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

Lines changed: 4 additions & 1 deletion

@@ -440,12 +440,15 @@ Release 2.5.0 - UNRELEASED
     HDFS-6337. Setfacl testcase is failing due to dash character in username
     in TestAclCLI (umamahesh)
 
-    HDFS-5381. ExtendedBlock#hashCode should use both blockId and block pool ID
+    HDFS-5381. ExtendedBlock#hashCode should use both blockId and block pool ID
     (Benoy Antony via Colin Patrick McCabe)
 
     HDFS-6240. WebImageViewer returns 404 if LISTSTATUS to an empty directory.
     (Akira Ajisaka via wheat9)
 
+    HDFS-6351. Command hdfs dfs -rm -r can't remove empty directory.
+    (Yongjun Zhang via wang)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

Lines changed: 15 additions & 10 deletions

@@ -139,6 +139,7 @@
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
@@ -3243,10 +3244,11 @@ private boolean renameToInternal(FSPermissionChecker pc, String src,
       // Rename does not operates on link targets
       // Do not resolveLink when checking permissions of src and dst
       // Check write access to parent of src
-      checkPermission(pc, src, false, null, FsAction.WRITE, null, null, false);
+      checkPermission(pc, src, false, null, FsAction.WRITE, null, null,
+          false, false);
       // Check write access to ancestor of dst
       checkPermission(pc, actualdst, false, FsAction.WRITE, null, null, null,
-          false);
+          false, false);
     }
 
     if (dir.renameTo(src, dst, logRetryCache)) {
@@ -3307,9 +3309,11 @@ private void renameToInternal(FSPermissionChecker pc, String src, String dst,
       // Rename does not operates on link targets
       // Do not resolveLink when checking permissions of src and dst
       // Check write access to parent of src
-      checkPermission(pc, src, false, null, FsAction.WRITE, null, null, false);
+      checkPermission(pc, src, false, null, FsAction.WRITE, null, null, false,
+          false);
       // Check write access to ancestor of dst
-      checkPermission(pc, dst, false, FsAction.WRITE, null, null, null, false);
+      checkPermission(pc, dst, false, FsAction.WRITE, null, null, null, false,
+          false);
     }
 
     dir.renameTo(src, dst, logRetryCache, options);
@@ -3389,11 +3393,11 @@ private boolean deleteInternal(String src, boolean recursive,
       checkNameNodeSafeMode("Cannot delete " + src);
       src = FSDirectory.resolvePath(src, pathComponents, dir);
       if (!recursive && dir.isNonEmptyDirectory(src)) {
-        throw new IOException(src + " is non empty");
+        throw new PathIsNotEmptyDirectoryException(src + " is non empty");
       }
       if (enforcePermission && isPermissionEnabled) {
         checkPermission(pc, src, false, null, FsAction.WRITE, null,
-            FsAction.ALL, false);
+            FsAction.ALL, true, false);
       }
       // Unlink the target directory from directory tree
       if (!dir.delete(src, collectedBlocks, removedINodes, logRetryCache)) {
@@ -3545,7 +3549,8 @@ HdfsFileStatus getFileInfo(String src, boolean resolveLink)
       checkOperation(OperationCategory.READ);
       src = FSDirectory.resolvePath(src, pathComponents, dir);
       if (isPermissionEnabled) {
-        checkPermission(pc, src, false, null, null, null, null, resolveLink);
+        checkPermission(pc, src, false, null, null, null, null, false,
+            resolveLink);
       }
       stat = dir.getFileInfo(src, resolveLink);
     } catch (AccessControlException e) {
@@ -5544,7 +5549,7 @@ private void checkPermission(FSPermissionChecker pc,
       FsAction parentAccess, FsAction access, FsAction subAccess)
       throws AccessControlException, UnresolvedLinkException {
     checkPermission(pc, path, doCheckOwner, ancestorAccess,
-        parentAccess, access, subAccess, true);
+        parentAccess, access, subAccess, false, true);
   }
 
   /**
@@ -5555,14 +5560,14 @@ private void checkPermission(FSPermissionChecker pc,
   private void checkPermission(FSPermissionChecker pc,
       String path, boolean doCheckOwner, FsAction ancestorAccess,
       FsAction parentAccess, FsAction access, FsAction subAccess,
-      boolean resolveLink)
+      boolean ignoreEmptyDir, boolean resolveLink)
       throws AccessControlException, UnresolvedLinkException {
     if (!pc.isSuperUser()) {
       dir.waitForReady();
       readLock();
       try {
         pc.checkPermission(path, dir, doCheckOwner, ancestorAccess,
-            parentAccess, access, subAccess, resolveLink);
+            parentAccess, access, subAccess, ignoreEmptyDir, resolveLink);
       } finally {
         readUnlock();
       }
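
Two behavioral notes on the FSNamesystem changes above. First, the legacy checkPermission overload now delegates with ignoreEmptyDir=false, so every existing call site keeps its old semantics; only deleteInternal passes true. Second, a non-recursive delete of a non-empty directory now raises the typed org.apache.hadoop.fs.PathIsNotEmptyDirectoryException instead of a bare IOException. A hedged client-side sketch of what that enables (the helper is illustrative, and whether the typed exception survives RPC unwrapping intact depends on the client in use):

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;

final class RmdirSketch {
  // Illustrative helper, not from the patch: attempt a non-recursive
  // delete and report "directory not empty" distinctly from other errors.
  static boolean tryRmdir(FileSystem fs, Path dir) throws IOException {
    try {
      return fs.delete(dir, false /* recursive */);
    } catch (PathIsNotEmptyDirectoryException e) {
      // Thrown when the target still has children, letting callers
      // branch on this case instead of parsing exception messages.
      return false;
    }
  }
}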

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java

Lines changed: 12 additions & 6 deletions

@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.permission.AclEntryType;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -136,6 +137,7 @@ public void checkSuperuserPrivilege()
    * @param subAccess If path is a directory,
    * it is the access required of the path and all the sub-directories.
    * If path is not a directory, there is no effect.
+   * @param ignoreEmptyDir Ignore permission checking for empty directory?
    * @param resolveLink whether to resolve the final path component if it is
    * a symlink
    * @throws AccessControlException
@@ -146,7 +148,7 @@ public void checkSuperuserPrivilege()
    */
   void checkPermission(String path, FSDirectory dir, boolean doCheckOwner,
       FsAction ancestorAccess, FsAction parentAccess, FsAction access,
-      FsAction subAccess, boolean resolveLink)
+      FsAction subAccess, boolean ignoreEmptyDir, boolean resolveLink)
       throws AccessControlException, UnresolvedLinkException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("ACCESS CHECK: " + this
@@ -155,6 +157,7 @@ void checkPermission(String path, FSDirectory dir, boolean doCheckOwner,
           + ", parentAccess=" + parentAccess
           + ", access=" + access
           + ", subAccess=" + subAccess
+          + ", ignoreEmptyDir=" + ignoreEmptyDir
           + ", resolveLink=" + resolveLink);
     }
     // check if (parentAccess != null) && file exists, then check sb
@@ -182,7 +185,7 @@ void checkPermission(String path, FSDirectory dir, boolean doCheckOwner,
       check(last, snapshotId, access);
     }
     if (subAccess != null) {
-      checkSubAccess(last, snapshotId, subAccess);
+      checkSubAccess(last, snapshotId, subAccess, ignoreEmptyDir);
     }
     if (doCheckOwner) {
       checkOwner(last, snapshotId);
@@ -207,18 +210,21 @@ private void checkTraverse(INode[] inodes, int last, int snapshotId
   }
 
   /** Guarded by {@link FSNamesystem#readLock()} */
-  private void checkSubAccess(INode inode, int snapshotId, FsAction access
-      ) throws AccessControlException {
+  private void checkSubAccess(INode inode, int snapshotId, FsAction access,
+      boolean ignoreEmptyDir) throws AccessControlException {
     if (inode == null || !inode.isDirectory()) {
       return;
    }
 
     Stack<INodeDirectory> directories = new Stack<INodeDirectory>();
     for(directories.push(inode.asDirectory()); !directories.isEmpty(); ) {
       INodeDirectory d = directories.pop();
-      check(d, snapshotId, access);
+      ReadOnlyList<INode> cList = d.getChildrenList(snapshotId);
+      if (!(cList.isEmpty() && ignoreEmptyDir)) {
+        check(d, snapshotId, access);
+      }
 
-      for(INode child : d.getChildrenList(snapshotId)) {
+      for(INode child : cList) {
         if (child.isDirectory()) {
           directories.push(child.asDirectory());
         }
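
The core of the fix is the guard added to checkSubAccess: during the iterative depth-first walk, a directory whose child list is empty is skipped when ignoreEmptyDir is set, because deleting it touches nothing below it (write access to its parent is checked separately). A standalone sketch of the same traversal pattern, with a simplified hypothetical Dir type standing in for INodeDirectory and the snapshot machinery:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

final class SubAccessSketch {
  interface Dir {
    List<Dir> children();
    void check(); // throws if the caller lacks the required access
  }

  // Depth-first over an explicit stack, mirroring the Stack<INodeDirectory>
  // loop in FSPermissionChecker#checkSubAccess.
  static void checkSubAccess(Dir root, boolean ignoreEmptyDir) {
    Deque<Dir> stack = new ArrayDeque<>();
    stack.push(root);
    while (!stack.isEmpty()) {
      Dir d = stack.pop();
      List<Dir> children = d.children();
      // The new guard: an empty directory contributes nothing to a
      // recursive delete, so its own mode bits need not grant access.
      if (!(children.isEmpty() && ignoreEmptyDir)) {
        d.check();
      }
      for (Dir child : children) {
        stack.push(child); // the real code pushes only sub-directories
      }
    }
  }
}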

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSPermission.java

Lines changed: 27 additions & 7 deletions

@@ -429,6 +429,7 @@ private void testPermissionCheckingPerUser(UserGroupInformation ugi,
       short[] ancestorPermission, short[] parentPermission,
       short[] filePermission, Path[] parentDirs, Path[] files, Path[] dirs)
       throws Exception {
+    boolean[] isDirEmpty = new boolean[NUM_TEST_PERMISSIONS];
     login(SUPERUSER);
     for (int i = 0; i < NUM_TEST_PERMISSIONS; i++) {
       create(OpType.CREATE, files[i]);
@@ -441,6 +442,8 @@ private void testPermissionCheckingPerUser(UserGroupInformation ugi,
       FsPermission fsPermission = new FsPermission(filePermission[i]);
       fs.setPermission(files[i], fsPermission);
       fs.setPermission(dirs[i], fsPermission);
+
+      isDirEmpty[i] = (fs.listStatus(dirs[i]).length == 0);
     }
 
     login(ugi);
@@ -461,7 +464,7 @@ private void testPermissionCheckingPerUser(UserGroupInformation ugi,
           parentPermission[i], ancestorPermission[next], parentPermission[next]);
       testDeleteFile(ugi, files[i], ancestorPermission[i], parentPermission[i]);
       testDeleteDir(ugi, dirs[i], ancestorPermission[i], parentPermission[i],
-          filePermission[i], null);
+          filePermission[i], null, isDirEmpty[i]);
     }
 
     // test non existent file
@@ -924,7 +927,8 @@ void call() throws IOException {
   }
 
   /* A class that verifies the permission checking is correct for
-   * directory deletion */
+   * directory deletion
+   */
   private class DeleteDirPermissionVerifier extends DeletePermissionVerifier {
     private short[] childPermissions;
 
@@ -958,6 +962,17 @@ protected boolean expectPermissionDeny() {
     }
   }
 
+  /* A class that verifies the permission checking is correct for
+   * empty-directory deletion
+   */
+  private class DeleteEmptyDirPermissionVerifier extends DeleteDirPermissionVerifier {
+    @Override
+    void setOpPermission() {
+      this.opParentPermission = SEARCH_MASK | WRITE_MASK;
+      this.opPermission = NULL_MASK;
+    }
+  }
+
   final DeletePermissionVerifier fileDeletionVerifier =
       new DeletePermissionVerifier();
 
@@ -971,14 +986,19 @@ private void testDeleteFile(UserGroupInformation ugi, Path file,
   final DeleteDirPermissionVerifier dirDeletionVerifier =
       new DeleteDirPermissionVerifier();
 
+  final DeleteEmptyDirPermissionVerifier emptyDirDeletionVerifier =
+      new DeleteEmptyDirPermissionVerifier();
+
   /* test if the permission checking of directory deletion is correct */
   private void testDeleteDir(UserGroupInformation ugi, Path path,
       short ancestorPermission, short parentPermission, short permission,
-      short[] childPermissions) throws Exception {
-    dirDeletionVerifier.set(path, ancestorPermission, parentPermission,
-        permission, childPermissions);
-    dirDeletionVerifier.verifyPermission(ugi);
-
+      short[] childPermissions,
+      final boolean isDirEmpty) throws Exception {
+    DeleteDirPermissionVerifier ddpv = isDirEmpty?
+        emptyDirDeletionVerifier : dirDeletionVerifier;
+    ddpv.set(path, ancestorPermission, parentPermission, permission,
+        childPermissions);
+    ddpv.verifyPermission(ugi);
   }
 
   /* log into dfs as the given user */
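
A hedged reading of the new DeleteEmptyDirPermissionVerifier above: deleting an empty directory should require write and search (execute) permission on the parent and nothing on the directory itself, matching POSIX rmdir semantics. The stand-in constants below restate the overridden masks; the octal values are assumptions, not the test's actual definitions:

final class EmptyDirDeleteMasks {
  static final short SEARCH_MASK = 01; // x: traverse the parent directory
  static final short WRITE_MASK  = 02; // w: modify the parent's entries
  static final short NULL_MASK   = 00; // no permission required at all

  // What DeleteEmptyDirPermissionVerifier#setOpPermission encodes:
  static final short OP_PARENT_PERMISSION = SEARCH_MASK | WRITE_MASK; // parent: -wx
  static final short OP_PERMISSION = NULL_MASK; // the empty directory: none
}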
