Skip to content

Commit c8d17cb

Browse files
committed
HDFS-6345. DFS.listCacheDirectives() should allow filtering based on cache directive ID. (wang)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1595086 13f79535-47bb-0310-9956-ffa450edef68
1 parent bf2e8e5 commit c8d17cb

File tree

6 files changed

+131
-8
lines changed

6 files changed

+131
-8
lines changed

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -366,6 +366,9 @@ Release 2.5.0 - UNRELEASED
366366
HDFS-5683. Better audit log messages for caching operations.
367367
(Abhiraj Butala via wang)
368368

369+
HDFS-6345. DFS.listCacheDirectives() should allow filtering based on
370+
cache directive ID. (wang)
371+
369372
OPTIMIZATIONS
370373

371374
HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java

Lines changed: 68 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -23,6 +23,10 @@
2323
import org.apache.hadoop.classification.InterfaceAudience;
2424
import org.apache.hadoop.classification.InterfaceStability;
2525
import org.apache.hadoop.fs.BatchedRemoteIterator;
26+
import org.apache.hadoop.fs.InvalidRequestException;
27+
import org.apache.hadoop.ipc.RemoteException;
28+
29+
import com.google.common.base.Preconditions;
2630

2731
/**
2832
* CacheDirectiveIterator is a remote iterator that iterates cache directives.
@@ -33,7 +37,7 @@
3337
public class CacheDirectiveIterator
3438
extends BatchedRemoteIterator<Long, CacheDirectiveEntry> {
3539

36-
private final CacheDirectiveInfo filter;
40+
private CacheDirectiveInfo filter;
3741
private final ClientProtocol namenode;
3842

3943
public CacheDirectiveIterator(ClientProtocol namenode,
@@ -43,10 +47,72 @@ public CacheDirectiveIterator(ClientProtocol namenode,
4347
this.filter = filter;
4448
}
4549

50+
private static CacheDirectiveInfo removeIdFromFilter(CacheDirectiveInfo filter) {
51+
CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(filter);
52+
builder.setId(null);
53+
return builder.build();
54+
}
55+
56+
/**
57+
* Used for compatibility when communicating with a server version that
58+
* does not support filtering directives by ID.
59+
*/
60+
private static class SingleEntry implements
61+
BatchedEntries<CacheDirectiveEntry> {
62+
63+
private final CacheDirectiveEntry entry;
64+
65+
public SingleEntry(final CacheDirectiveEntry entry) {
66+
this.entry = entry;
67+
}
68+
69+
@Override
70+
public CacheDirectiveEntry get(int i) {
71+
if (i > 0) {
72+
return null;
73+
}
74+
return entry;
75+
}
76+
77+
@Override
78+
public int size() {
79+
return 1;
80+
}
81+
82+
@Override
83+
public boolean hasMore() {
84+
return false;
85+
}
86+
}
87+
4688
@Override
4789
public BatchedEntries<CacheDirectiveEntry> makeRequest(Long prevKey)
4890
throws IOException {
49-
return namenode.listCacheDirectives(prevKey, filter);
91+
BatchedEntries<CacheDirectiveEntry> entries = null;
92+
try {
93+
entries = namenode.listCacheDirectives(prevKey, filter);
94+
} catch (IOException e) {
95+
if (e.getMessage().contains("Filtering by ID is unsupported")) {
96+
// Retry case for old servers, do the filtering client-side
97+
long id = filter.getId();
98+
filter = removeIdFromFilter(filter);
99+
// Using id - 1 as prevId should get us a window containing the id
100+
// This is somewhat brittle, since it depends on directives being
101+
// returned in order of ascending ID.
102+
entries = namenode.listCacheDirectives(id - 1, filter);
103+
for (int i=0; i<entries.size(); i++) {
104+
CacheDirectiveEntry entry = entries.get(i);
105+
if (entry.getInfo().getId().equals((Long)id)) {
106+
return new SingleEntry(entry);
107+
}
108+
}
109+
throw new RemoteException(InvalidRequestException.class.getName(),
110+
"Did not find requested id " + id);
111+
}
112+
throw e;
113+
}
114+
Preconditions.checkNotNull(entries);
115+
return entries;
50116
}
51117

52118
@Override

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java

Lines changed: 22 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -691,15 +691,25 @@ public void removeDirective(long id, FSPermissionChecker pc)
691691
assert namesystem.hasReadLock();
692692
final int NUM_PRE_ALLOCATED_ENTRIES = 16;
693693
String filterPath = null;
694-
if (filter.getId() != null) {
695-
throw new IOException("Filtering by ID is unsupported.");
696-
}
697694
if (filter.getPath() != null) {
698695
filterPath = validatePath(filter);
699696
}
700697
if (filter.getReplication() != null) {
701-
throw new IOException("Filtering by replication is unsupported.");
698+
throw new InvalidRequestException(
699+
"Filtering by replication is unsupported.");
700+
}
701+
702+
// Querying for a single ID
703+
final Long id = filter.getId();
704+
if (id != null) {
705+
if (!directivesById.containsKey(id)) {
706+
throw new InvalidRequestException("Did not find requested id " + id);
707+
}
708+
// Since we use a tailMap on directivesById, setting prev to id-1 gets
709+
// us the directive with the id (if present)
710+
prevId = id - 1;
702711
}
712+
703713
ArrayList<CacheDirectiveEntry> replies =
704714
new ArrayList<CacheDirectiveEntry>(NUM_PRE_ALLOCATED_ENTRIES);
705715
int numReplies = 0;
@@ -711,6 +721,14 @@ public void removeDirective(long id, FSPermissionChecker pc)
711721
}
712722
CacheDirective curDirective = cur.getValue();
713723
CacheDirectiveInfo info = cur.getValue().toInfo();
724+
725+
// If the requested ID is present, it should be the first item.
726+
// Hitting this case means the ID is not present, or we're on the second
727+
// item and should break out.
728+
if (id != null &&
729+
!(info.getId().equals(id))) {
730+
break;
731+
}
714732
if (filter.getPool() != null &&
715733
!info.getPool().equals(filter.getPool())) {
716734
continue;

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java

Lines changed: 8 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -503,19 +503,21 @@ public String getName() {
503503

504504
@Override
505505
public String getShortUsage() {
506-
return "[" + getName() + " [-stats] [-path <path>] [-pool <pool>]]\n";
506+
return "[" + getName()
507+
+ " [-stats] [-path <path>] [-pool <pool>] [-id <id>]\n";
507508
}
508509

509510
@Override
510511
public String getLongUsage() {
511512
TableListing listing = getOptionDescriptionListing();
513+
listing.addRow("-stats", "List path-based cache directive statistics.");
512514
listing.addRow("<path>", "List only " +
513515
"cache directives with this path. " +
514516
"Note that if there is a cache directive for <path> " +
515517
"in a cache pool that we don't have read access for, it " +
516518
"will not be listed.");
517519
listing.addRow("<pool>", "List only path cache directives in that pool.");
518-
listing.addRow("-stats", "List path-based cache directive statistics.");
520+
listing.addRow("<id>", "List the cache directive with this id.");
519521
return getShortUsage() + "\n" +
520522
"List cache directives.\n\n" +
521523
listing.toString();
@@ -534,6 +536,10 @@ public int run(Configuration conf, List<String> args) throws IOException {
534536
builder.setPool(poolFilter);
535537
}
536538
boolean printStats = StringUtils.popOption("-stats", args);
539+
String idFilter = StringUtils.popOptionWithArgument("-id", args);
540+
if (idFilter != null) {
541+
builder.setId(Long.parseLong(idFilter));
542+
}
537543
if (!args.isEmpty()) {
538544
System.err.println("Can't understand argument: " + args.get(0));
539545
return 1;

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java

Lines changed: 6 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -477,6 +477,12 @@ public void testAddRemoveDirectives() throws Exception {
477477
iter = dfs.listCacheDirectives(
478478
new CacheDirectiveInfo.Builder().setPool("pool2").build());
479479
validateListAll(iter, betaId);
480+
iter = dfs.listCacheDirectives(
481+
new CacheDirectiveInfo.Builder().setId(alphaId2).build());
482+
validateListAll(iter, alphaId2);
483+
iter = dfs.listCacheDirectives(
484+
new CacheDirectiveInfo.Builder().setId(relativeId).build());
485+
validateListAll(iter, relativeId);
480486

481487
dfs.removeCacheDirective(betaId);
482488
iter = dfs.listCacheDirectives(

hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml

Lines changed: 24 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -519,5 +519,29 @@
519519
</comparator>
520520
</comparators>
521521
</test>
522+
523+
<test> <!--Tested -->
524+
<description>Testing listing a single cache directive</description>
525+
<test-commands>
526+
<cache-admin-command>-addPool pool1</cache-admin-command>
527+
<cache-admin-command>-addDirective -path /foo -pool pool1 -ttl 2d</cache-admin-command>
528+
<cache-admin-command>-addDirective -path /bar -pool pool1 -ttl 24h</cache-admin-command>
529+
<cache-admin-command>-addDirective -path /baz -replication 2 -pool pool1 -ttl 60m</cache-admin-command>
530+
<cache-admin-command>-listDirectives -stats -id 30</cache-admin-command>
531+
</test-commands>
532+
<cleanup-commands>
533+
<cache-admin-command>-removePool pool1</cache-admin-command>
534+
</cleanup-commands>
535+
<comparators>
536+
<comparator>
537+
<type>SubstringComparator</type>
538+
<expected-output>Found 1 entry</expected-output>
539+
</comparator>
540+
<comparator>
541+
<type>SubstringComparator</type>
542+
<expected-output>30 pool1 1</expected-output>
543+
</comparator>
544+
</comparators>
545+
</test>
522546
</tests>
523547
</configuration>

0 commit comments

Comments
 (0)