Skip to content

Commit ff36a3a

Browse files
author
Suresh Srinivas
committed
MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable & FileUtil#canRead/Write/Execute. Contributed by Ivan Mitic.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1477403 13f79535-47bb-0310-9956-ffa450edef68
1 parent b22bf2d commit ff36a3a

File tree

4 files changed

+8
-3
lines changed

4 files changed

+8
-3
lines changed

hadoop-mapreduce-project/CHANGES.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -159,6 +159,9 @@ Trunk (Unreleased)
159 159      HADOOP-9372. Fix bad timeout annotations on tests.
160 160      (Arpit Agarwal via suresh)
161 161
    162  +   MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable &
    163  +   FileUtil#canRead/Write/Execute. (Ivan Mitic via suresh)
    164  +
162 165  Release 2.0.5-beta - UNRELEASED
163 166
164 167    INCOMPATIBLE CHANGES

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Application.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ class Application<K1 extends WritableComparable, V1 extends Writable,
113 113        cmd.add(interpretor);
114 114      }
115 115      String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
116      -   if (!new File(executable).canExecute()) {
    116  +   if (!FileUtil.canExecute(new File(executable))) {
117 117        // LinuxTaskController sets +x permissions on all distcache files already.
118 118        // In case of DefaultTaskController, set permissions here.
119 119        FileUtil.chmod(executable, "u+x");

hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PathFinder.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
21 21    import java.io.*;
22 22
23 23    import org.apache.hadoop.classification.InterfaceAudience;
   24  + import org.apache.hadoop.fs.FileUtil;
24 25
25 26    /**
26 27     * Maps a relative pathname to an absolute pathname using the PATH environment.
@@ -79,7 +80,7 @@ public File getAbsolutePath(String filename) {
79 80          f = new File(entry + fileSep + filename);
80 81        }
81 82        // see if the filename matches and we can read it
82     -     if (f.isFile() && f.canRead()) {
   83  +     if (f.isFile() && FileUtil.canRead(f)) {
83 84          return f;
84 85        }
85 86        classvalue = classvalue.substring(val + 1).trim();

hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@
46 46    import org.apache.hadoop.mapreduce.filecache.DistributedCache;
47 47    import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
48 48    import org.apache.hadoop.fs.FileSystem;
   49  + import org.apache.hadoop.fs.FileUtil;
49 50    import org.apache.hadoop.fs.Path;
50 51    import org.apache.hadoop.fs.FileAlreadyExistsException;
51 52    import org.apache.hadoop.mapred.FileInputFormat;
@@ -394,7 +395,7 @@ private void validate(final List<String> values)
394 395      throws IllegalArgumentException {
395 396      for (String file : values) {
396 397        File f = new File(file);
397      -    if (!f.canRead()) {
    398  +    if (!FileUtil.canRead(f)) {
398 399          fail("File: " + f.getAbsolutePath()
399 400              + " does not exist, or is not readable.");
400 401        }

0 commit comments

Comments (0)