Commit c86c5c0

HDFS-4510. Cover classes ClusterJspHelper/NamenodeJspHelper with unit tests. Contributed by Andrey Klochkov.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530669 13f79535-47bb-0310-9956-ffa450edef68
1 parent 3ea48c4 commit c86c5c0
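
This commit adds a new test class, TestClusterJspHelper, which exercises ClusterJspHelper.generateClusterHealthReport() and generateDecommissioningReport() against a MiniDFSCluster, and extends TestNameNodeJspHelper with tests for getRandomDatanode(), redirectToRandomDataNode(), NodeListJsp.generateNodesList(), getInodeLimitText() and getVersionTable(). The TestNameNodeJspHelper fixture is also switched from a per-test @Before/@After cluster to a shared @BeforeClass/@AfterClass cluster with two datanodes, and the corresponding HDFS-4510 entry is added to CHANGES.txt under Release 2.3.0.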

3 files changed: +245 -20 lines changed

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

Lines changed: 3 additions & 0 deletions
@@ -303,6 +303,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-4512. Cover package org.apache.hadoop.hdfs.server.common with tests.
     (Vadim Bondarev via kihwal)
 
+    HDFS-4510. Cover classes ClusterJspHelper/NamenodeJspHelper with unit
+    tests. (Andrey Klochkov via kihwal)
+
   OPTIMIZATIONS
 
     HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn)

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestClusterJspHelper.java (new file)

Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import static org.junit.Assert.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.namenode.ClusterJspHelper.ClusterStatus;
+import org.apache.hadoop.hdfs.server.namenode.ClusterJspHelper.DecommissionStatus;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestClusterJspHelper {
+
+  private MiniDFSCluster cluster;
+  private Configuration conf;
+
+  @Before
+  public void setUp() throws Exception {
+    conf = new Configuration();
+    cluster = new MiniDFSCluster.Builder(conf).build();
+    cluster.waitClusterUp();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (cluster != null)
+      cluster.shutdown();
+  }
+
+  @Test(timeout = 15000)
+  public void testClusterJspHelperReports() {
+    ClusterJspHelper clusterJspHelper = new ClusterJspHelper();
+    ClusterStatus clusterStatus = clusterJspHelper
+        .generateClusterHealthReport();
+    assertNotNull("testClusterJspHelperReports ClusterStatus is null",
+        clusterStatus);
+    DecommissionStatus decommissionStatus = clusterJspHelper
+        .generateDecommissioningReport();
+    assertNotNull("testClusterJspHelperReports DecommissionStatus is null",
+        decommissionStatus);
+  }
+}
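
As a usage note (an assumption about the standard Hadoop Maven build, not something stated in the commit itself): the new class is a plain JUnit 4 test, so it can typically be run on its own with Maven Surefire from the hadoop-hdfs-project/hadoop-hdfs module, for example with mvn test -Dtest=TestClusterJspHelper.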

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java

Lines changed: 183 additions & 20 deletions
@@ -17,18 +17,22 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-
 import static org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase.LOADING_EDITS;
 import static org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase.LOADING_FSIMAGE;
 import static org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase.SAFEMODE;
 import static org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase.SAVING_CHECKPOINT;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import javax.servlet.ServletContext;
@@ -40,30 +44,44 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.After;
+import org.apache.hadoop.util.VersionInfo;
+import org.junit.AfterClass;
 import org.junit.Assert;
-import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 import org.znerd.xmlenc.XMLOutputter;
 
-public class TestNameNodeJspHelper {
+import com.google.common.collect.ImmutableSet;
 
-  private MiniDFSCluster cluster = null;
-  Configuration conf = null;
+public class TestNameNodeJspHelper {
 
-  @Before
-  public void setUp() throws Exception {
+  private static final int DATA_NODES_AMOUNT = 2;
+
+  private static MiniDFSCluster cluster;
+  private static Configuration conf;
+  private static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
+
+  @BeforeClass
+  public static void setUp() throws Exception {
     conf = new HdfsConfiguration();
-    cluster = new MiniDFSCluster.Builder(conf).build();
-    cluster.waitActive();
+    cluster = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(DATA_NODES_AMOUNT).build();
+    cluster.waitClusterUp();
   }
 
-  @After
-  public void tearDown() throws Exception {
+  @AfterClass
+  public static void tearDown() throws Exception {
     if (cluster != null)
       cluster.shutdown();
   }
@@ -75,23 +93,23 @@ public void testDelegationToken() throws IOException, InterruptedException {
     UserGroupInformation ugi = UserGroupInformation.createRemoteUser("auser");
     String tokenString = NamenodeJspHelper.getDelegationToken(nn, request,
         conf, ugi);
-    //tokenString returned must be null because security is disabled
+    // tokenString returned must be null because security is disabled
     Assert.assertEquals(null, tokenString);
   }
-
+
   @Test
-  public void tesSecurityModeText() {
+  public void testSecurityModeText() {
     conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
     UserGroupInformation.setConfiguration(conf);
     String securityOnOff = NamenodeJspHelper.getSecurityModeText();
-    Assert.assertTrue("security mode doesn't match. Should be ON",
+    Assert.assertTrue("security mode doesn't match. Should be ON",
         securityOnOff.contains("ON"));
-    //Security is enabled
+    // Security is enabled
     conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "simple");
     UserGroupInformation.setConfiguration(conf);
-
+
     securityOnOff = NamenodeJspHelper.getSecurityModeText();
-    Assert.assertTrue("security mode doesn't match. Should be OFF",
+    Assert.assertTrue("security mode doesn't match. Should be OFF",
         securityOnOff.contains("OFF"));
   }
 
@@ -192,7 +210,7 @@ public void testXMLCorruptBlockInfoNullNamesystem() throws IOException {
 
   /**
    * Checks if the list contains any string that partially matches the regex.
-   * 
+   *
    * @param list List<String> containing strings to check
    * @param regex String regex to check
    * @return boolean true if some string in list partially matches regex
@@ -206,4 +224,149 @@ private static boolean containsMatch(List<String> list, String regex) {
     }
     return false;
   }
+
+  @Test(timeout = 15000)
+  public void testGetRandomDatanode() {
+    ImmutableSet<String> set = ImmutableSet.of();
+    NameNode nameNode = cluster.getNameNode();
+    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
+    for (DataNode dataNode : cluster.getDataNodes()) {
+      builder.add(dataNode.getDisplayName());
+    }
+    set = builder.build();
+
+    for (int i = 0; i < 10; i++) {
+      DatanodeDescriptor dnDescriptor = NamenodeJspHelper
+          .getRandomDatanode(nameNode);
+      assertTrue("testGetRandomDatanode error",
+          set.contains(dnDescriptor.toString()));
+    }
+  }
+
+  @Test(timeout = 15000)
+  public void testNamenodeJspHelperRedirectToRandomDataNode() throws IOException, InterruptedException {
+    final String urlPart = "browseDirectory.jsp?namenodeInfoPort=";
+
+    ServletContext context = mock(ServletContext.class);
+    HttpServletRequest request = mock(HttpServletRequest.class);
+    HttpServletResponse resp = mock(HttpServletResponse.class);
+
+    when(request.getScheme()).thenReturn("http");
+    when(request.getParameter(UserParam.NAME)).thenReturn("localuser");
+    when(context.getAttribute(NAMENODE_ATTRIBUTE_KEY)).thenReturn(
+        cluster.getNameNode());
+    when(context.getAttribute(JspHelper.CURRENT_CONF)).thenReturn(conf);
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    doAnswer(new Answer<String>() {
+      @Override
+      public String answer(InvocationOnMock invocation) throws Throwable {
+        return null;
+      }
+    }).when(resp).sendRedirect(captor.capture());
+
+    NamenodeJspHelper.redirectToRandomDataNode(context, request, resp);
+    assertTrue(captor.getValue().contains(urlPart));
+  }
+
+  private enum DataNodeStatus {
+    LIVE("[Live Datanodes(| +):(| +)]\\d"),
+    DEAD("[Dead Datanodes(| +):(| +)]\\d");
+
+    private Pattern pattern;
+
+    public Pattern getPattern() {
+      return pattern;
+    }
+
+    DataNodeStatus(String line) {
+      this.pattern = Pattern.compile(line);
+    }
+  }
+
+  private void checkDeadLiveNodes(NameNode nameNode, int deadCount,
+      int lifeCount) {
+    FSNamesystem ns = nameNode.getNamesystem();
+    DatanodeManager dm = ns.getBlockManager().getDatanodeManager();
+    List<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
+    List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
+    dm.fetchDatanodes(live, dead, true);
+    assertTrue("checkDeadLiveNodes error !!!", (live.size() == lifeCount)
+        && dead.size() == deadCount);
+  }
+
+  @Test(timeout = 15000)
+  public void testNodeListJspGenerateNodesList() throws IOException {
+    String output;
+    NameNode nameNode = cluster.getNameNode();
+    ServletContext context = mock(ServletContext.class);
+    when(context.getAttribute("name.node")).thenReturn(nameNode);
+    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
+        .thenReturn(cluster.getNameNode().getHttpAddress());
+    checkDeadLiveNodes(nameNode, 0, DATA_NODES_AMOUNT);
+    output = getOutputFromGeneratedNodesList(context, DataNodeStatus.LIVE);
+    assertCounts(DataNodeStatus.LIVE, output, DATA_NODES_AMOUNT);
+    output = getOutputFromGeneratedNodesList(context, DataNodeStatus.DEAD);
+    assertCounts(DataNodeStatus.DEAD, output, 0);
+  }
+
+  private void assertCounts(DataNodeStatus dataNodeStatus, String output,
+      int expectedCount) {
+    Matcher matcher = DataNodeStatus.LIVE.getPattern().matcher(output);
+    if (matcher.find()) {
+      String digitLine = output.substring(matcher.start(), matcher.end())
+          .trim();
+      assertTrue("assertCounts error. actual != expected",
+          Integer.valueOf(digitLine) == expectedCount);
+    } else {
+      fail("assertCount matcher error");
+    }
+  }
+
+  private String getOutputFromGeneratedNodesList(ServletContext context,
+      DataNodeStatus dnStatus) throws IOException {
+    JspWriter out = mock(JspWriter.class);
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    NamenodeJspHelper.NodeListJsp nodelistjsp = new NamenodeJspHelper.NodeListJsp();
+    final StringBuffer buffer = new StringBuffer();
+    doAnswer(new Answer<String>() {
+      @Override
+      public String answer(InvocationOnMock invok) {
+        Object[] args = invok.getArguments();
+        buffer.append((String) args[0]);
+        return null;
+      }
+    }).when(out).print(captor.capture());
+    HttpServletRequest request = mock(HttpServletRequest.class);
+    when(request.getScheme()).thenReturn("http");
+    when(request.getParameter("whatNodes")).thenReturn(dnStatus.name());
+    nodelistjsp.generateNodesList(context, out, request);
+    return buffer.toString();
+  }
+
+  @Test(timeout = 15000)
+  public void testGetInodeLimitText() {
+    NameNode nameNode = cluster.getNameNode();
+    FSNamesystem fsn = nameNode.getNamesystem();
+    ImmutableSet<String> patterns =
+        ImmutableSet.of("files and directories", "Heap Memory used", "Non Heap Memory used");
+    String line = NamenodeJspHelper.getInodeLimitText(fsn);
+    for(String pattern: patterns) {
+      assertTrue("testInodeLimitText error " + pattern,
+          line.contains(pattern));
+    }
+  }
+
+  @Test(timeout = 15000)
+  public void testGetVersionTable() {
+    NameNode nameNode = cluster.getNameNode();
+    FSNamesystem fsn = nameNode.getNamesystem();
+    ImmutableSet<String> patterns = ImmutableSet.of(VersionInfo.getVersion(),
+        VersionInfo.getRevision(), VersionInfo.getUser(), VersionInfo.getBranch(),
+        fsn.getClusterId(), fsn.getBlockPoolId());
+    String line = NamenodeJspHelper.getVersionTable(fsn);
+    for(String pattern: patterns) {
+      assertTrue("testGetVersionTable error " + pattern,
+          line.contains(pattern));
+    }
+  }
 }
