
Commit d9bad63 (parent 4aee06a)

HDFS-5312. Generate HTTP/HTTPS URL in DFSUtil#getInfoServer() based on the configured http policy. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1548629 13f79535-47bb-0310-9956-ffa450edef68

File tree: 16 files changed (+212, −144 lines)


hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
Lines changed: 3 additions & 0 deletions

@@ -233,6 +233,9 @@ Trunk (Unreleased)
 
     HDFS-5630. Hook up cache directive and pool usage statistics. (wang)
 
+    HDFS-5312. Generate HTTP / HTTPS URL in DFSUtil#getInfoServer() based on the
+    configured http policy. (Haohui Mai via jing9)
+
   OPTIMIZATIONS
 
     HDFS-5349. DNA_CACHE and DNA_UNCACHE should be by blockId only. (cmccabe)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
Lines changed: 54 additions & 20 deletions

@@ -92,6 +92,7 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -958,39 +959,71 @@ public static String getNameServiceIdFromAddress(final Configuration conf,
    * given namenode rpc address.
    * @param conf
    * @param namenodeAddr - namenode RPC address
-   * @param httpsAddress -If true, and if security is enabled, returns server
-   *                      https address. If false, returns server http address.
+   * @param scheme - the scheme (http / https)
    * @return server http or https address
    * @throws IOException
    */
-  public static String getInfoServer(InetSocketAddress namenodeAddr,
-      Configuration conf, boolean httpsAddress) throws IOException {
-    boolean securityOn = UserGroupInformation.isSecurityEnabled();
-    String httpAddressKey = (securityOn && httpsAddress) ?
-        DFS_NAMENODE_HTTPS_ADDRESS_KEY : DFS_NAMENODE_HTTP_ADDRESS_KEY;
-    String httpAddressDefault = (securityOn && httpsAddress) ?
-        DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT : DFS_NAMENODE_HTTP_ADDRESS_DEFAULT;
-
-    String suffixes[];
+  public static URI getInfoServer(InetSocketAddress namenodeAddr,
+      Configuration conf, String scheme) throws IOException {
+    String[] suffixes = null;
     if (namenodeAddr != null) {
       // if non-default namenode, try reverse look up
       // the nameServiceID if it is available
       suffixes = getSuffixIDs(conf, namenodeAddr,
           DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
           DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY);
+    }
+
+    String authority;
+    if ("http".equals(scheme)) {
+      authority = getSuffixedConf(conf, DFS_NAMENODE_HTTP_ADDRESS_KEY,
+          DFS_NAMENODE_HTTP_ADDRESS_DEFAULT, suffixes);
+    } else if ("https".equals(scheme)) {
+      authority = getSuffixedConf(conf, DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+          DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT, suffixes);
     } else {
-      suffixes = new String[2];
+      throw new IllegalArgumentException("Invalid scheme:" + scheme);
     }
-    String configuredInfoAddr = getSuffixedConf(conf, httpAddressKey,
-        httpAddressDefault, suffixes);
+
     if (namenodeAddr != null) {
-      return substituteForWildcardAddress(configuredInfoAddr,
+      authority = substituteForWildcardAddress(authority,
           namenodeAddr.getHostName());
-    } else {
-      return configuredInfoAddr;
     }
+    return URI.create(scheme + "://" + authority);
+  }
+
+  /**
+   * Lookup the HTTP / HTTPS address of the namenode, and replace its hostname
+   * with defaultHost when it found out that the address is a wildcard / local
+   * address.
+   *
+   * @param defaultHost
+   *          The default host name of the namenode.
+   * @param conf
+   *          The configuration
+   * @param scheme
+   *          HTTP or HTTPS
+   * @throws IOException
+   */
+  public static URI getInfoServerWithDefaultHost(String defaultHost,
+      Configuration conf, final String scheme) throws IOException {
+    URI configuredAddr = getInfoServer(null, conf, scheme);
+    String authority = substituteForWildcardAddress(
+        configuredAddr.getAuthority(), defaultHost);
+    return URI.create(scheme + "://" + authority);
+  }
+
+  /**
+   * Determine whether HTTP or HTTPS should be used to connect to the remote
+   * server. Currently the client only connects to the server via HTTPS if the
+   * policy is set to HTTPS_ONLY.
+   *
+   * @return the scheme (HTTP / HTTPS)
+   */
+  public static String getHttpClientScheme(Configuration conf) {
+    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
+    return policy == HttpConfig.Policy.HTTPS_ONLY ? "https" : "http";
   }
-
 
   /**
    * Substitute a default host in the case that an address has been configured
@@ -1004,8 +1037,9 @@ public static String getInfoServer(InetSocketAddress namenodeAddr,
    * @return the substituted address
    * @throws IOException if it is a wildcard address and security is enabled
    */
-  public static String substituteForWildcardAddress(String configuredAddress,
-      String defaultHost) throws IOException {
+  @VisibleForTesting
+  static String substituteForWildcardAddress(String configuredAddress,
+      String defaultHost) throws IOException {
     InetSocketAddress sockAddr = NetUtils.createSocketAddr(configuredAddress);
     InetSocketAddress defaultSockAddr = NetUtils.createSocketAddr(defaultHost
         + ":0");

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
Lines changed: 4 additions & 2 deletions

@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
+import java.net.URL;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -79,7 +80,7 @@ public class BackupNode extends NameNode {
   /** Name-node RPC address */
   String nnRpcAddress;
   /** Name-node HTTP address */
-  String nnHttpAddress;
+  URL nnHttpAddress;
   /** Checkpoint manager */
   Checkpointer checkpointManager;
 
@@ -313,7 +314,8 @@ private NamespaceInfo handshake(Configuration conf) throws IOException {
         NamenodeProtocol.class, UserGroupInformation.getCurrentUser(),
         true).getProxy();
     this.nnRpcAddress = NetUtils.getHostPortString(nnAddress);
-    this.nnHttpAddress = NetUtils.getHostPortString(super.getHttpServerAddress(conf));
+    this.nnHttpAddress = DFSUtil.getInfoServer(nnAddress, conf,
+        DFSUtil.getHttpClientScheme(conf)).toURL();
     // get version and id info from the name-node
     NamespaceInfo nsInfo = null;
     while(!isStopRequested()) {
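
The handshake now gets a policy-aware URI from DFSUtil and converts it to the java.net.URL type that the checkpointing code consumes. In isolation, and with an assumed address value, the conversion looks like this:

import java.net.URI;
import java.net.URL;

public class UriToUrlSketch {
  public static void main(String[] args) throws Exception {
    // toURL() throws MalformedURLException only for URIs that are not
    // absolute; getInfoServer() always returns an absolute URI.
    URI infoServer = URI.create("https://nn1.example.com:50470");
    URL nnHttpAddress = infoServer.toURL();
    System.out.println(nnHttpAddress);
  }
}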

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/Checkpointer.java
Lines changed: 12 additions & 2 deletions

@@ -24,11 +24,14 @@
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URL;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.protocol.CheckpointCommand;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
@@ -61,6 +64,7 @@ class Checkpointer extends Daemon {
   private String infoBindAddress;
 
   private CheckpointConf checkpointConf;
+  private final Configuration conf;
 
   private BackupImage getFSImage() {
     return (BackupImage)backupNode.getFSImage();
@@ -74,6 +78,7 @@ private NamenodeProtocol getRemoteNamenodeProxy(){
    * Create a connection to the primary namenode.
    */
   Checkpointer(Configuration conf, BackupNode bnNode) throws IOException {
+    this.conf = conf;
     this.backupNode = bnNode;
     try {
       initialize(conf);
@@ -274,10 +279,15 @@ backupNode.nnHttpAddress, getImageListenAddress(),
         + " New Image Size: " + imageSize);
   }
 
-  private InetSocketAddress getImageListenAddress() {
+  private URL getImageListenAddress() {
     InetSocketAddress httpSocAddr = backupNode.getHttpAddress();
     int httpPort = httpSocAddr.getPort();
-    return new InetSocketAddress(infoBindAddress, httpPort);
+    try {
+      return new URL(DFSUtil.getHttpClientScheme(conf) + "://" + infoBindAddress + ":" + httpPort);
+    } catch (MalformedURLException e) {
+      // Unreachable
+      throw new RuntimeException(e);
+    }
   }
 
   static void rollForwardByApplyingLogs(
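
The same scheme-plus-authority composition, extracted as a standalone sketch. The helper name, host, and port below are made up for illustration; only the pattern mirrors getImageListenAddress():

import java.net.MalformedURLException;
import java.net.URL;

public class ImageListenUrlSketch {
  // Compose a URL from the client scheme, the bind address, and the HTTP
  // port, treating MalformedURLException as a programming error since the
  // inputs are well-formed by construction.
  static URL imageListenUrl(String scheme, String host, int port) {
    try {
      return new URL(scheme + "://" + host + ":" + port);
    } catch (MalformedURLException e) {
      // Unreachable for valid scheme/host/port combinations
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args) {
    System.out.println(imageListenUrl("http", "backup.example.com", 50105));
  }
}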

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
Lines changed: 20 additions & 16 deletions

@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.util.ArrayList;
@@ -41,7 +42,6 @@
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -272,12 +272,13 @@ private int getDatanodeHttpPort(Configuration conf) {
   static class NamenodeMXBeanHelper {
     private static final ObjectMapper mapper = new ObjectMapper();
     private final String host;
-    private final String httpAddress;
+    private final URI httpAddress;
 
     NamenodeMXBeanHelper(InetSocketAddress addr, Configuration conf)
         throws IOException, MalformedObjectNameException {
       this.host = addr.getHostName();
-      this.httpAddress = DFSUtil.getInfoServer(addr, conf, false);
+      this.httpAddress = DFSUtil.getInfoServer(addr, conf,
+          DFSUtil.getHttpClientScheme(conf));
     }
 
     /** Get the map corresponding to the JSON string */
@@ -356,7 +357,7 @@ public NamenodeStatus getNamenodeStatus(String props) throws IOException,
       nn.blocksCount = getProperty(props, "TotalBlocks").getLongValue();
       nn.missingBlocksCount = getProperty(props, "NumberOfMissingBlocks")
           .getLongValue();
-      nn.httpAddress = httpAddress;
+      nn.httpAddress = httpAddress.toURL();
       getLiveNodeCount(getProperty(props, "LiveNodes").getValueAsText(), nn);
       getDeadNodeCount(getProperty(props, "DeadNodes").getValueAsText(), nn);
       nn.softwareVersion = getProperty(props, "SoftwareVersion").getTextValue();
@@ -591,12 +592,14 @@ public void toXML(XMLOutputter doc) throws IOException {
       toXmlItemBlock(doc, "Blocks", Long.toString(nn.blocksCount));
       toXmlItemBlock(doc, "Missing Blocks",
           Long.toString(nn.missingBlocksCount));
-      toXmlItemBlockWithLink(doc, nn.liveDatanodeCount + " (" +
-        nn.liveDecomCount + ")", nn.httpAddress+"/dfsnodelist.jsp?whatNodes=LIVE",
-        "Live Datanode (Decommissioned)");
-      toXmlItemBlockWithLink(doc, nn.deadDatanodeCount + " (" +
-        nn.deadDecomCount + ")", nn.httpAddress+"/dfsnodelist.jsp?whatNodes=DEAD"
-        , "Dead Datanode (Decommissioned)");
+      toXmlItemBlockWithLink(doc, nn.liveDatanodeCount + " ("
+          + nn.liveDecomCount + ")", new URL(nn.httpAddress,
+          "/dfsnodelist.jsp?whatNodes=LIVE"),
+          "Live Datanode (Decommissioned)");
+      toXmlItemBlockWithLink(doc, nn.deadDatanodeCount + " ("
+          + nn.deadDecomCount + ")", new URL(nn.httpAddress,
+          "/dfsnodelist.jsp?whatNodes=DEAD"),
+          "Dead Datanode (Decommissioned)");
       toXmlItemBlock(doc, "Software Version", nn.softwareVersion);
       doc.endTag(); // node
     }
@@ -625,7 +628,7 @@ static class NamenodeStatus {
     int liveDecomCount = 0;
     int deadDatanodeCount = 0;
     int deadDecomCount = 0;
-    String httpAddress = null;
+    URL httpAddress = null;
     String softwareVersion = "";
   }
 
@@ -763,7 +766,8 @@ public void toXML(XMLOutputter doc) throws IOException {
         .equals(DecommissionStates.UNKNOWN.toString()))) {
       doc.startTag("node");
       // dn
-      toXmlItemBlockWithLink(doc, dnhost, (dnhost+":"+httpPort),"DataNode");
+      toXmlItemBlockWithLink(doc, dnhost, new URL("http", dnhost, httpPort,
+          ""), "DataNode");
 
       // overall status first
       toXmlItemBlock(doc, OVERALL_STATUS, overallStatus);
@@ -823,11 +827,11 @@ private static void toXmlItemBlock(XMLOutputter doc, String key, String value)
    * link="http://hostname:50070" />
    */
   private static void toXmlItemBlockWithLink(XMLOutputter doc, String value,
-      String url, String label) throws IOException {
+      URL url, String label) throws IOException {
     doc.startTag("item");
     doc.attribute("label", label);
     doc.attribute("value", value);
-    doc.attribute("link", "///" + url);
+    doc.attribute("link", url.toString());
     doc.endTag(); // item
   }
 
@@ -885,7 +889,7 @@ private static String readOutput(URL url) throws IOException {
     return out.toString();
   }
 
-  private static String queryMbean(String httpAddress, Configuration conf)
+  private static String queryMbean(URI httpAddress, Configuration conf)
       throws IOException {
     /**
      * Although the other namenode might support HTTPS, it is fundamentally
@@ -896,7 +900,7 @@ private static String queryMbean(String httpAddress, Configuration conf)
      *
      * As a result, we just hard code the connection as an HTTP connection.
      */
-    URL url = new URL("http://" + httpAddress + JMX_QRY);
+    URL url = new URL(httpAddress.toURL(), JMX_QRY);
     return readOutput(url);
   }
   /**
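
The link-building change above swaps string concatenation for java.net.URL resolution, so the scheme and authority always come from the base URL instead of being assumed. A small sketch of the difference, with an assumed host and port:

import java.net.URL;

public class NodeListLinkSketch {
  public static void main(String[] args) throws Exception {
    URL base = new URL("https://nn1.example.com:50470");

    // Old style: string concatenation, which produced scheme-less links
    // of the form "///host:port/dfsnodelist.jsp?whatNodes=LIVE".
    String concatenated = "///" + base.getAuthority()
        + "/dfsnodelist.jsp?whatNodes=LIVE";
    System.out.println(concatenated);

    // New style: resolve the path against the base URL, preserving the
    // configured scheme (http or https) and authority.
    URL live = new URL(base, "/dfsnodelist.jsp?whatNodes=LIVE");
    System.out.println(live);
  }
}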

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
Lines changed: 10 additions & 9 deletions

@@ -21,6 +21,7 @@
 import java.util.*;
 import java.io.*;
 import java.net.InetSocketAddress;
+import java.net.URL;
 
 import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
@@ -31,10 +32,8 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -87,8 +86,8 @@ public void doGet(final HttpServletRequest request,
     ServletContext context = getServletContext();
     final FSImage nnImage = NameNodeHttpServer.getFsImageFromContext(context);
     final GetImageParams parsedParams = new GetImageParams(request, response);
-    final Configuration conf =
-        (Configuration)getServletContext().getAttribute(JspHelper.CURRENT_CONF);
+    final Configuration conf = (Configuration) context
+        .getAttribute(JspHelper.CURRENT_CONF);
 
     if (UserGroupInformation.isSecurityEnabled() &&
         !isValidRequestor(context, request.getUserPrincipal().getName(), conf)) {
@@ -163,7 +162,7 @@ public Void run() throws Exception {
           // issue a HTTP get request to download the new fsimage
           MD5Hash downloadImageDigest =
               TransferFsImage.downloadImageToStorage(
-                  parsedParams.getInfoServer(), txid,
+                  parsedParams.getInfoServer(conf), txid,
                   nnImage.getStorage(), true);
           nnImage.saveDigestAndRenameCheckpointImage(txid, downloadImageDigest);
 
@@ -309,7 +308,9 @@ static String getParamStringForLog(RemoteEditLog log,
   }
 
   static String getParamStringToPutImage(long txid,
-      InetSocketAddress imageListenAddress, Storage storage) {
+      URL url, Storage storage) {
+    InetSocketAddress imageListenAddress = NetUtils.createSocketAddr(url
+        .getAuthority());
     String machine = !imageListenAddress.isUnresolved()
         && imageListenAddress.getAddress().isAnyLocalAddress() ? null
         : imageListenAddress.getHostName();
@@ -419,11 +420,11 @@ boolean isPutImage() {
       return isPutImage;
     }
 
-    String getInfoServer() throws IOException{
+    URL getInfoServer(Configuration conf) throws IOException {
       if (machineName == null || remoteport == 0) {
-        throw new IOException ("MachineName and port undefined");
+        throw new IOException("MachineName and port undefined");
       }
-      return machineName + ":" + remoteport;
+      return new URL(DFSUtil.getHttpClientScheme(conf), machineName, remoteport, "");
     }
 
     boolean shouldFetchLatest() {
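
For reference, the four-argument URL constructor used by the new GetImageParams#getInfoServer builds just scheme://host:port when the file component is empty. The scheme and host below are assumptions for illustration; in the servlet the scheme comes from DFSUtil.getHttpClientScheme(conf):

import java.net.URL;

public class PutImageUrlSketch {
  public static void main(String[] args) throws Exception {
    // An empty file component yields a bare scheme://host:port URL.
    String scheme = "https";
    URL infoServer = new URL(scheme, "2nn.example.com", 50470, "");
    System.out.println(infoServer); // https://2nn.example.com:50470
  }
}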
