
Remove dangling spaces wherever found. (#127475) #127607

Merged
1 commit merged on May 1, 2025
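The change is mechanical: user-facing message strings that ended in a dangling trailing space now end at their punctuation, and code that relied on those trailing spaces as separators now inserts the space explicitly. A minimal, self-contained sketch of the before/after (the example message is taken from one of the hunks below; the wrapper class and main method are illustration only):

```java
public class DanglingSpaceExample {
    // Before: the message carries a dangling space after the period, which leaks
    // into logs, wrapped exceptions, and test assertions.
    static RuntimeException before(Exception cause) {
        return new IllegalStateException("Failed to determine the health of the cluster. ", cause);
    }

    // After: the message ends at the period; any separating space is added by the caller.
    static RuntimeException after(Exception cause) {
        return new IllegalStateException("Failed to determine the health of the cluster.", cause);
    }

    public static void main(String[] args) {
        System.out.println(before(new Exception()).getMessage().endsWith(" ")); // prints: true
        System.out.println(after(new Exception()).getMessage().endsWith(" "));  // prints: false
    }
}
```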
5 changes: 5 additions & 0 deletions docs/changelog/127475.yaml
@@ -0,0 +1,5 @@
pr: 127475
summary: Remove dangling spaces wherever found
area: Security
type: bug
issues: []
@@ -333,8 +333,11 @@ private void throwExpectedStartObject(XContentParser parser, XContentParser.Toke

private static void throwMissingRequiredFields(List<String[]> requiredFields) {
final StringBuilder message = new StringBuilder();
for (String[] fields : requiredFields) {
message.append("Required one of fields ").append(Arrays.toString(fields)).append(", but none were specified. ");
for (int i = 0; i < requiredFields.size(); i++) {
if (i > 0) {
message.append(" ");
}
message.append("Required one of fields ").append(Arrays.toString(requiredFields.get(i))).append(", but none were specified.");
}
throw new IllegalArgumentException(message.toString());
}
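In this hunk the old enhanced-for loop, which appended a trailing space after every sentence, is replaced by an indexed loop that prepends a single separating space only from the second sentence onward, so the final message has no dangling space. For comparison, a minimal sketch of the same behaviour using java.util.StringJoiner, which only inserts its delimiter between elements (illustration only, not the PR's code; the class and method names are hypothetical):

```java
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

// StringJoiner inserts the " " delimiter between sentences, so the result has
// single spaces between sentences and no trailing space, matching the new loop above.
public class RequiredFieldsMessageSketch {
    static String build(List<String[]> requiredFields) {
        StringJoiner message = new StringJoiner(" ");
        for (String[] fields : requiredFields) {
            message.add("Required one of fields " + Arrays.toString(fields) + ", but none were specified.");
        }
        return message.toString();
    }

    public static void main(String[] args) {
        // Prints the same string the updated test below asserts:
        // Required one of fields [a, b], but none were specified. Required one of fields [c, d], but none were specified.
        System.out.println(build(List.of(new String[] { "a", "b" }, new String[] { "c", "d" })));
    }
}
```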
@@ -1004,7 +1004,7 @@ public void testMultipleRequiredFieldSet() throws IOException {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> objectParser.apply(parser, null));
assertThat(
e.getMessage(),
equalTo("Required one of fields [a, b], but none were specified. " + "Required one of fields [c, d], but none were specified. ")
equalTo("Required one of fields [a, b], but none were specified. " + "Required one of fields [c, d], but none were specified.")
);
}

@@ -299,7 +299,7 @@ static void updateShardAllocationStatus(
NAME,
"increase_shard_limit_index_setting",
"Elasticsearch isn't allowed to allocate some shards from these indices to any data nodes because each node has reached the index "
+ "shard limit. ",
+ "shard limit.",
"Increase the values for the ["
+ INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey()
+ "] index setting on each index or add more nodes to the target tiers.",
@@ -316,7 +316,7 @@ static void updateShardAllocationStatus(
"increase_shard_limit_index_setting:tier:" + tier,
"Elasticsearch isn't allowed to allocate some shards from these indices because each node in the ["
+ tier
+ "] tier has reached the index shard limit. ",
+ "] tier has reached the index shard limit.",
"Increase the values for the ["
+ INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey()
+ "] index setting on each index or add more nodes to the target tiers.",
@@ -347,7 +347,7 @@ static void updateShardAllocationStatus(
"increase_shard_limit_cluster_setting:tier:" + tier,
"Elasticsearch isn't allowed to allocate some shards from these indices because each node in the ["
+ tier
+ "] tier has reached the cluster shard limit. ",
+ "] tier has reached the cluster shard limit.",
"Increase the values for the ["
+ CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey()
+ "] cluster setting or add more nodes to the target tiers.",
@@ -395,7 +395,7 @@ static void updateShardAllocationStatus(
NAME,
"migrate_data_tiers_include_data",
"Elasticsearch isn't allowed to allocate some shards from these indices to any nodes in the desired data tiers because the "
+ "indices are configured with allocation filter rules that are incompatible with the nodes in this tier. ",
+ "indices are configured with allocation filter rules that are incompatible with the nodes in this tier.",
"Remove ["
+ INDEX_ROUTING_INCLUDE_GROUP_PREFIX
+ ".data] from the index settings or try migrating to data tiers by first stopping ILM [POST /_ilm/stop] and then using "
@@ -277,7 +277,7 @@ public void checkClusterHealthWithRetriesWaitingForCluster(String username, Secu
checkClusterHealthWithRetriesWaitingForCluster(username, password, retries);
return;
} else {
throw new IllegalStateException("Failed to determine the health of the cluster. ", e);
throw new IllegalStateException("Failed to determine the health of the cluster.", e);
}
}
final int responseStatus = response.getHttpStatus();
@@ -494,7 +494,7 @@ private static Map<String, RoleDescriptor> initializeReservedRoles() {
+ "This role grants monitor_ml cluster privileges, read access to the .ml-notifications and .ml-anomalies* indices "
+ "(which store machine learning results), and write access to .ml-annotations* indices. "
+ "Machine learning users also need index privileges for source and destination indices "
+ "and roles that grant access to Kibana. "
+ "and roles that grant access to Kibana."
)
),
entry(
@@ -211,7 +211,7 @@ static DeprecationIssue checkMultipleDataPaths(
"Specifying multiple data paths is deprecated",
"https://ela.st/es-deprecation-7-multiple-paths",
"The [path.data] setting contains a list of paths. Specify a single path as a string. Use RAID or other system level "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0. ",
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0.",
false,
null
);
@@ -231,7 +231,7 @@ static DeprecationIssue checkDataPathsList(
"Multiple data paths are not supported",
"https://ela.st/es-deprecation-7-multiple-paths",
"The [path.data] setting contains a list of paths. Specify a single path as a string. Use RAID or other system level "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0. ",
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0.",
false,
null
);
@@ -111,7 +111,7 @@ public void testMultipleDataPaths() {
issue.getDetails(),
equalTo(
"The [path.data] setting contains a list of paths. Specify a single path as a string. Use RAID or other system level "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0. "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0."
)
);
String url = "https://ela.st/es-deprecation-7-multiple-paths";
@@ -136,7 +136,7 @@ public void testDataPathsList() {
issue.getDetails(),
equalTo(
"The [path.data] setting contains a list of paths. Specify a single path as a string. Use RAID or other system level "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0. "
+ "features to utilize multiple disks. If multiple data paths are configured, the node will fail to start in 8.0."
)
);
String url = "https://ela.st/es-deprecation-7-multiple-paths";
@@ -84,7 +84,7 @@ public void testParsingWithMissingRequiredField() throws IOException {

try (XContentParser xContentParser = createXContentParser(json)) {
Exception e = expectThrows(IllegalArgumentException.class, () -> parser().parse(xContentParser, context));
assertEquals(Strings.format("Required one of fields [%s], but none were specified. ", field), e.getMessage());
assertEquals(Strings.format("Required one of fields [%s], but none were specified.", field), e.getMessage());
}
}
}
@@ -152,7 +152,7 @@ public QueryString(
name = "allow_wildcard",
type = "boolean",
valueHint = { "false", "true" },
description = "If true, the query attempts to analyze wildcard terms in the query string. Defaults to false. "
description = "If true, the query attempts to analyze wildcard terms in the query string. Defaults to false."
),
@MapParam.MapParamEntry(
name = "analyzer",
@@ -337,7 +337,7 @@ private ElasticsearchStatusException unsupportedStreamingTaskException(Request r
}

private static ElasticsearchStatusException unknownServiceException(String service, String inferenceId) {
return new ElasticsearchStatusException("Unknown service [{}] for model [{}]. ", RestStatus.BAD_REQUEST, service, inferenceId);
return new ElasticsearchStatusException("Unknown service [{}] for model [{}]", RestStatus.BAD_REQUEST, service, inferenceId);
}

private static ElasticsearchStatusException requestModelTaskTypeMismatchException(TaskType requested, TaskType expected) {
@@ -174,7 +174,7 @@ private void parseModels(List<UnparsedModel> unparsedModels, ActionListener<GetI

private ElasticsearchStatusException serviceNotFoundException(String service, String inferenceId) {
throw new ElasticsearchStatusException(
"Unknown service [{}] for inference endpoint [{}]. ",
"Unknown service [{}] for inference endpoint [{}].",
RestStatus.INTERNAL_SERVER_ERROR,
service,
inferenceId
@@ -167,7 +167,7 @@ public void testMetricsAfterMissingService() {

verify(listener).onFailure(assertArg(e -> {
assertThat(e, isA(ElasticsearchException.class));
assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]. "));
assertThat(e.getMessage(), is("Unknown service [" + serviceId + "] for model [" + inferenceId + "]"));
assertThat(((ElasticsearchException) e).status(), is(RestStatus.BAD_REQUEST));
}));
verify(inferenceStats.inferenceDuration()).record(anyLong(), assertArg(attributes -> {
@@ -817,7 +817,7 @@ private static boolean askCertSigningRequest(Terminal terminal) {

terminal.println("A CSR is used when you want your certificate to be created by an existing");
terminal.println("Certificate Authority (CA) that you do not control (that is, you don't have");
terminal.println("access to the keys for that CA). ");
terminal.println("access to the keys for that CA).");
terminal.println("");
terminal.println("If you are in a corporate environment with a central security team, then you");
terminal.println("may have an existing Corporate CA that can generate your certificate for you.");
@@ -1338,7 +1338,7 @@ private void innerRefresh(
);
} else {
logger.info(
"failed to update the original token document [{}] after all retries, the update result was [{}]. ",
"failed to update the original token document [{}] after all retries, the update result was [{}].",
tokenDoc.id(),
updateResponse.getResult()
);
@@ -398,7 +398,7 @@ void checkElasticKeystorePasswordValid(Terminal terminal, Environment env) throw
terminal.errorPrintln("");
throw new UserException(
ExitCodes.CONFIG,
"Failed to establish SSL connection to elasticsearch at " + route.toString() + ". ",
"Failed to establish SSL connection to elasticsearch at " + route.toString() + ".",
e
);
} catch (IOException e) {
@@ -557,7 +557,7 @@ private void changeUserPassword(String user, SecureString password, Terminal ter
terminal.errorPrintln("* Try running this tool again.");
terminal.errorPrintln("* Try running with the --verbose parameter for additional messages.");
terminal.errorPrintln("* Check the elasticsearch logs for additional error details.");
terminal.errorPrintln("* Use the change password API manually. ");
terminal.errorPrintln("* Use the change password API manually.");
terminal.errorPrintln("");
throw new UserException(ExitCodes.TEMP_FAILURE, "Failed to set password for user [" + user + "].");
}
@@ -235,7 +235,7 @@ public void authenticate(OpenIdConnectToken token, final ActionListener<JWTClaim
// Don't wrap in a new ElasticsearchSecurityException
listener.onFailure(e);
} catch (Exception e) {
listener.onFailure(new ElasticsearchSecurityException("Failed to consume the OpenID connect response. ", e));
listener.onFailure(new ElasticsearchSecurityException("Failed to consume the OpenID connect response.", e));
}
}

@@ -121,7 +121,7 @@ private SamlAttributes authenticateResponse(Element element, Collection<String>
logger.debug(
"The Attribute Statements of SAML Response with ID [{}] contained no attributes and the SAML Assertion Subject "
+ "did not contain a SAML NameID. Please verify that the Identity Provider configuration with regards to attribute "
+ "release is correct. ",
+ "release is correct.",
response.getID()
);
throw samlException("Could not process any SAML attributes in {}", response.getElementQName());
@@ -305,7 +305,7 @@ static RoleDescriptor parseRoleDescriptor(
Validation.Error validationError = Validation.Roles.validateRoleName(roleName, false);
if (validationError != null) {
logger.error(
"invalid role definition [{}] in roles file [{}]. invalid role name - {}. skipping role... ",
"invalid role definition [{}] in roles file [{}]. invalid role name - {}. skipping role...",
roleName,
path.toAbsolutePath(),
validationError
@@ -392,7 +392,7 @@ private static RoleDescriptor checkDescriptor(
Validation.Error validationError = Validation.Roles.validateRoleDescription(descriptor.getDescription());
if (validationError != null) {
logger.error(
"invalid role definition [{}] in roles file [{}]. invalid description - {}. skipping role... ",
"invalid role definition [{}] in roles file [{}]. invalid description - {}. skipping role...",
roleName,
path.toAbsolutePath(),
validationError
@@ -213,7 +213,7 @@ private void checkClusterHealthWithRetries(
try {
response = client.execute("GET", clusterHealthUrl, username, password, () -> null, CommandLineHttpClient::responseBuilder);
} catch (Exception e) {
throw new UserException(ExitCodes.UNAVAILABLE, "Failed to determine the health of the cluster. ", e);
throw new UserException(ExitCodes.UNAVAILABLE, "Failed to determine the health of the cluster.", e);
}
final int responseStatus = response.getHttpStatus();
if (responseStatus != HttpURLConnection.HTTP_OK) {
@@ -261,7 +261,7 @@ private void throwFailedToParse(JsonParser parser, String currentFieldName, Exce
private static void throwMissingRequiredFields(List<String[]> requiredFields) {
final StringBuilder message = new StringBuilder();
for (String[] fields : requiredFields) {
message.append("Required one of fields ").append(Arrays.toString(fields)).append(", but none were specified. ");
message.append("Required one of fields ").append(Arrays.toString(fields)).append(", but none were specified.");
}
throw new ParseException(message.toString());
}
@@ -467,24 +467,31 @@ private void waitForTransformStopped(
return;
} else {
StringBuilder message = new StringBuilder();
boolean lineAdded = false;
if (persistentTaskIds.size() - stillRunningTasks.size() - exceptions.size() > 0) {
message.append(optionalSpace(lineAdded));
message.append("Successfully stopped [");
message.append(persistentTaskIds.size() - stillRunningTasks.size() - exceptions.size());
message.append("] transforms. ");
message.append("] transforms.");
lineAdded = true;
}

if (exceptions.size() > 0) {
message.append(optionalSpace(lineAdded));
message.append("Could not stop the transforms ");
message.append(exceptions.keySet());
message.append(" as they were failed. Use force stop to stop the transforms. ");
message.append(" as they were failed. Use force stop to stop the transforms.");
lineAdded = true;
}

if (stillRunningTasks.size() > 0) {
message.append(optionalSpace(lineAdded));
message.append("Could not stop the transforms ");
message.append(stillRunningTasks);
message.append(" as they timed out [");
message.append(timeout.toString());
message.append("].");
lineAdded = true;
}

listener.onFailure(new ElasticsearchStatusException(message.toString(), RestStatus.REQUEST_TIMEOUT));
@@ -542,4 +549,8 @@ static ActionListener<Response> cancelTransformTasksListener(
}
});
}

private static String optionalSpace(boolean spaceNeeded) {
return spaceNeeded ? " " : "";
}
}
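The transform hunk above combines both halves of the change: each sentence now ends at its punctuation, and the new lineAdded flag plus the optionalSpace helper insert a single separating space only when an earlier sentence was already appended. A self-contained sketch of that pattern with a concrete result (names follow the diff; the wrapper class, method signature, and example values are hypothetical):

```java
import java.util.List;

// Hypothetical wrapper around the pattern introduced above: sentences are appended
// without trailing spaces, and optionalSpace() supplies the separator only after
// the first sentence has been added.
public class StopMessageSketch {

    private static String optionalSpace(boolean spaceNeeded) {
        return spaceNeeded ? " " : "";
    }

    static String build(int stoppedCount, List<String> failedTransforms, List<String> timedOutTransforms, String timeout) {
        StringBuilder message = new StringBuilder();
        boolean lineAdded = false;
        if (stoppedCount > 0) {
            message.append(optionalSpace(lineAdded));
            message.append("Successfully stopped [").append(stoppedCount).append("] transforms.");
            lineAdded = true;
        }
        if (failedTransforms.isEmpty() == false) {
            message.append(optionalSpace(lineAdded));
            message.append("Could not stop the transforms ").append(failedTransforms);
            message.append(" as they were failed. Use force stop to stop the transforms.");
            lineAdded = true;
        }
        if (timedOutTransforms.isEmpty() == false) {
            message.append(optionalSpace(lineAdded));
            message.append("Could not stop the transforms ").append(timedOutTransforms);
            message.append(" as they timed out [").append(timeout).append("].");
            lineAdded = true;
        }
        return message.toString();
    }

    public static void main(String[] args) {
        // Prints: Successfully stopped [2] transforms. Could not stop the transforms [t1] as they timed out [30s].
        System.out.println(build(2, List.of(), List.of("t1"), "30s"));
    }
}
```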