Commit d8877af

Author: Suresh Srinivas

HDFS-4635. Move BlockManager#computeCapacity to LightWeightGSet. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1461364 13f79535-47bb-0310-9956-ffa450edef68

1 parent 1a64c3d commit d8877af

5 files changed: +136 −29 lines changed

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

Lines changed: 2 additions & 0 deletions

@@ -369,6 +369,8 @@ Release 2.0.5-beta - UNRELEASED
     HDFS-4246. The exclude node list should be more forgiving, for each output
     stream. (harsh via atm)
 
+    HDFS-4635. Move BlockManager#computeCapacity to LightWeightGSet. (suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

Lines changed: 1 addition & 0 deletions

@@ -235,6 +235,7 @@ public BlockManager(final Namesystem namesystem, final FSClusterStats stats,
     heartbeatManager = datanodeManager.getHeartbeatManager();
     invalidateBlocks = new InvalidateBlocks(datanodeManager);
 
+    // Compute the map capacity by allocating 2% of total memory
     blocksMap = new BlocksMap(DEFAULT_MAP_LOAD_FACTOR);
     blockplacement = BlockPlacementPolicy.getInstance(
         conf, stats, datanodeManager.getNetworkTopology());

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java

Lines changed: 2 additions & 29 deletions

@@ -60,38 +60,11 @@ public void remove() {
   private GSet<Block, BlockInfo> blocks;
 
   BlocksMap(final float loadFactor) {
-    this.capacity = computeCapacity();
+    // Use 2% of total memory to size the GSet capacity
+    this.capacity = LightWeightGSet.computeCapacity(2.0, "BlocksMap");
     this.blocks = new LightWeightGSet<Block, BlockInfo>(capacity);
   }
 
-  /**
-   * Let t = 2% of max memory.
-   * Let e = round(log_2 t).
-   * Then, we choose capacity = 2^e/(size of reference),
-   * unless it is outside the close interval [1, 2^30].
-   */
-  private static int computeCapacity() {
-    //VM detection
-    //See http://java.sun.com/docs/hotspot/HotSpotFAQ.html#64bit_detection
-    final String vmBit = System.getProperty("sun.arch.data.model");
-
-    //2% of max memory
-    final double twoPC = Runtime.getRuntime().maxMemory()/50.0;
-
-    //compute capacity
-    final int e1 = (int)(Math.log(twoPC)/Math.log(2.0) + 0.5);
-    final int e2 = e1 - ("32".equals(vmBit)? 2: 3);
-    final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
-    final int c = 1 << exponent;
-
-    if (LightWeightGSet.LOG.isDebugEnabled()) {
-      LightWeightGSet.LOG.debug("VM type = " + vmBit + "-bit");
-      LightWeightGSet.LOG.debug("2% max memory = " + twoPC/(1 << 20) + " MB");
-      LightWeightGSet.LOG.debug("capacity = 2^" + exponent
-          + " = " + c + " entries");
-    }
-    return c;
-  }
 
   void close() {
     // Empty blocks once GSet#clear is implemented (HDFS-3940)
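
The deleted javadoc above states the sizing rule that moves to LightWeightGSet: let t = 2% of max memory, let e = round(log_2 t), then choose capacity = 2^e / (size of a reference), clamped to [1, 2^30]. A minimal standalone sketch of that arithmetic, assuming a 64-bit VM (8-byte references) and a 1 GB max heap; both values are illustrative and not part of the patch:

// Worked example of the capacity computation, assuming a 64-bit VM
// (8-byte references) and a 1 GB max heap. Illustration only.
public class CapacityExample {
  public static void main(String[] args) {
    final long maxMemory = 1L << 30;               // assumed 1 GB max heap
    final double percentMemory = maxMemory / 50.0; // 2% of it, ~21.5 MB

    final int e1 = (int)(Math.log(percentMemory)/Math.log(2.0) + 0.5); // 24
    final int e2 = e1 - 3;                         // divide by 8-byte references
    final int exponent = Math.max(0, Math.min(30, e2));                // 21
    final int capacity = 1 << exponent;            // 2^21 = 2,097,152 entries

    // 2,097,152 references * 8 bytes = 16 MB, about 1.6% of the 1 GB heap
    System.out.println("capacity = 2^" + exponent + " = " + capacity);
  }
}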

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java

Lines changed: 53 additions & 0 deletions

@@ -24,8 +24,11 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * A low memory footprint {@link GSet} implementation,
  * which uses an array for storing the elements
@@ -285,4 +288,54 @@ public void remove() {
       throw new UnsupportedOperationException("Remove is not supported.");
     }
   }
+
+  /**
+   * Let t = percentage of max memory.
+   * Let e = round(log_2 t).
+   * Then, we choose capacity = 2^e/(size of reference),
+   * unless it is outside the closed interval [1, 2^30].
+   */
+  public static int computeCapacity(double percentage, String mapName) {
+    return computeCapacity(Runtime.getRuntime().maxMemory(), percentage,
+        mapName);
+  }
+
+  @VisibleForTesting
+  static int computeCapacity(long maxMemory, double percentage,
+      String mapName) {
+    if (percentage > 100.0 || percentage < 0.0) {
+      throw new HadoopIllegalArgumentException("Percentage " + percentage
+          + " must be greater than or equal to 0 "
+          + " and less than or equal to 100");
+    }
+    if (maxMemory < 0) {
+      throw new HadoopIllegalArgumentException("Memory " + maxMemory
+          + " must be greater than or equal to 0");
+    }
+    if (percentage == 0.0 || maxMemory == 0) {
+      return 0;
+    }
+    //VM detection
+    //See http://java.sun.com/docs/hotspot/HotSpotFAQ.html#64bit_detection
+    final String vmBit = System.getProperty("sun.arch.data.model");
+
+    //Percentage of max memory
+    final double percentDivisor = 100.0/percentage;
+    final double percentMemory = maxMemory/percentDivisor;
+
+    //compute capacity
+    final int e1 = (int)(Math.log(percentMemory)/Math.log(2.0) + 0.5);
+    final int e2 = e1 - ("32".equals(vmBit)? 2: 3);
+    final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
+    final int c = 1 << exponent;
+
+    if (LightWeightGSet.LOG.isDebugEnabled()) {
+      LOG.debug("Computing capacity for map " + mapName);
+      LOG.debug("VM type = " + vmBit + "-bit");
+      LOG.debug(percentage + "% max memory = "
+          + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
+      LOG.debug("capacity = 2^" + exponent + " = " + c + " entries");
+    }
+    return c;
+  }
 }
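
With computeCapacity now public on LightWeightGSet, callers other than BlocksMap can size their maps the same way. A minimal sketch of such a call, reusing the Block/BlockInfo types from the diff above; the 1% figure and the map name "ExampleMap" are illustrative only, not from this patch:

// Hypothetical caller: size a LightWeightGSet at 1% of max memory.
// The name "ExampleMap" only labels the debug log output.
final int capacity = LightWeightGSet.computeCapacity(1.0, "ExampleMap");
final GSet<Block, BlockInfo> map =
    new LightWeightGSet<Block, BlockInfo>(capacity);

Out-of-range arguments throw HadoopIllegalArgumentException, and a zero percentage or zero max memory yields a capacity of 0, as exercised by the tests below.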

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java

Lines changed: 78 additions & 0 deletions

@@ -21,6 +21,7 @@
 import java.util.Iterator;
 import java.util.Random;
 
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
@@ -452,4 +453,81 @@ public void setNext(LightWeightGSet.LinkedElement e) {
       next = e;
     }
   }
+
+  /**
+   * Test for {@link LightWeightGSet#computeCapacity(double, String)}
+   * with an invalid percentage less than 0.
+   */
+  @Test(expected=HadoopIllegalArgumentException.class)
+  public void testComputeCapacityNegativePercent() {
+    LightWeightGSet.computeCapacity(1024, -1.0, "testMap");
+  }
+
+  /**
+   * Test for {@link LightWeightGSet#computeCapacity(double, String)}
+   * with an invalid percentage greater than 100.
+   */
+  @Test(expected=HadoopIllegalArgumentException.class)
+  public void testComputeCapacityInvalidPercent() {
+    LightWeightGSet.computeCapacity(1024, 101.0, "testMap");
+  }
+
+  /**
+   * Test for {@link LightWeightGSet#computeCapacity(double, String)}
+   * with an invalid negative max memory.
+   */
+  @Test(expected=HadoopIllegalArgumentException.class)
+  public void testComputeCapacityInvalidMemory() {
+    LightWeightGSet.computeCapacity(-1, 50.0, "testMap");
+  }
+
+  private static boolean isPowerOfTwo(int num) {
+    return num == 0 || (num > 0 && Integer.bitCount(num) == 1);
+  }
+
+  /** Return capacity as a percentage of total memory */
+  private static int getPercent(long total, int capacity) {
+    // Reference size in bytes
+    double referenceSize =
+        System.getProperty("sun.arch.data.model").equals("32") ? 4.0 : 8.0;
+    return (int)(((capacity * referenceSize)/total) * 100.0);
+  }
+
+  /** Compute a capacity and validate it against the requested percentage */
+  private static void testCapacity(long maxMemory, double percent) {
+    int capacity = LightWeightGSet.computeCapacity(maxMemory, percent, "map");
+    LightWeightGSet.LOG.info("Validating - total memory " + maxMemory
+        + " percent " + percent + " returned capacity " + capacity);
+    // Returned capacity is zero or a power of two
+    Assert.assertTrue(isPowerOfTwo(capacity));
+
+    // Ensure the capacity returned is the nearest to the asked percentage
+    int capacityPercent = getPercent(maxMemory, capacity);
+    if (capacityPercent == percent) {
+      return;
+    } else if (capacityPercent > percent) {
+      Assert.assertTrue(getPercent(maxMemory, capacity * 2) > percent);
+    } else {
+      Assert.assertTrue(getPercent(maxMemory, capacity / 2) < percent);
+    }
+  }
+
+  /**
+   * Test for {@link LightWeightGSet#computeCapacity(double, String)}
+   */
+  @Test
+  public void testComputeCapacity() {
+    // Tests for boundary conditions where percent or memory is zero
+    testCapacity(0, 0.0);
+    testCapacity(100, 0.0);
+    testCapacity(0, 100.0);
+
+    // Compute capacity for 100 random max-memory and percentage values
+    Random r = new Random();
+    for (int i = 0; i < 100; i++) {
+      long maxMemory = r.nextInt(Integer.MAX_VALUE);
+      double percent = r.nextInt(101);
+      testCapacity(maxMemory, percent);
+    }
+  }
 }
