|
36 | 36 | import com.graphhopper.storage.*;
|
37 | 37 | import com.graphhopper.util.*;
|
38 | 38 | import com.graphhopper.util.exceptions.ConnectionNotFoundException;
|
39 |
| -import com.graphhopper.util.shapes.BBox; |
40 | 39 | import com.graphhopper.util.shapes.GHPoint;
|
41 | 40 | import org.slf4j.Logger;
|
42 | 41 | import org.slf4j.LoggerFactory;
|
@@ -227,28 +226,6 @@ public void createLandmarks() {
|
227 | 226 | landmarkWeightDA.setShort(pointer, (short) SHORT_INFINITY);
|
228 | 227 | }
|
229 | 228 |
|
230 |
| - String additionalInfo = ""; |
231 |
| - // guess the factor |
232 |
| - if (factor <= 0) { |
233 |
| - // A 'factor' is necessary to store the weight in just a short value but without loosing too much precision. |
234 |
| - // This factor is rather delicate to pick, we estimate it through the graph boundaries its maximum distance. |
235 |
| - // For small areas we use max_bounds_dist*X and otherwise we use a big fixed value for this distance. |
236 |
| - // If we would pick the distance too big for small areas this could lead to (slightly) suboptimal routes as there |
237 |
| - // will be too big rounding errors. But picking it too small is dangerous regarding performance |
238 |
| - // e.g. for Germany at least 1500km is very important otherwise speed is at least twice as slow e.g. for just 1000km |
239 |
| - BBox bounds = graph.getBounds(); |
240 |
| - double distanceInMeter = Helper.DIST_EARTH.calcDist(bounds.maxLat, bounds.maxLon, bounds.minLat, bounds.minLon) * 7; |
241 |
| - if (distanceInMeter > 50_000 * 7 || /* for tests and convenience we do for now: */ !bounds.isValid()) |
242 |
| - distanceInMeter = 30_000_000; |
243 |
| - |
244 |
| - double maxWeight = weighting.getMinWeight(distanceInMeter); |
245 |
| - setMaximumWeight(maxWeight); |
246 |
| - additionalInfo = ", maxWeight:" + maxWeight + ", from max distance:" + distanceInMeter / 1000f + "km"; |
247 |
| - } |
248 |
| - |
249 |
| - if (logDetails) |
250 |
| - LOGGER.info("init landmarks for subnetworks with node count greater than " + minimumNodes + " with factor:" + factor + additionalInfo); |
251 |
| - |
252 | 229 | int[] empty = new int[landmarks];
|
253 | 230 | Arrays.fill(empty, UNSET_SUBNETWORK);
|
254 | 231 | landmarkIDs.add(empty);
|
@@ -277,12 +254,30 @@ public void createLandmarks() {
|
277 | 254 | LOGGER.info("Calculated " + graphComponents.size() + " subnetworks via tarjan in " + sw.stop().getSeconds() + "s, " + Helper.getMemInfo());
|
278 | 255 |
|
279 | 256 | EdgeExplorer tmpExplorer = graph.createEdgeExplorer(new RequireBothDirectionsEdgeFilter(encoder));
|
| 257 | + String additionalInfo = ""; |
| 258 | + // guess the factor |
| 259 | + if (factor <= 0) { |
| 260 | + // A 'factor' is necessary to store the weight in just a short value but without losing too much precision. |
| 261 | + // This factor is rather delicate to pick, we estimate it from an exploration with some "test landmarks", |
| 262 | + // see estimateMaxWeight. If we pick the distance too big for small areas this could lead to (slightly) |
| 263 | + // suboptimal routes as there will be too big rounding errors. But picking it too small is bad for performance |
| 264 | + // e.g. for Germany at least 1500km is very important otherwise speed is at least twice as slow e.g. for 1000km |
| 265 | + double maxWeight = estimateMaxWeight(tmpExplorer, graphComponents, blockedEdges); |
| 266 | + setMaximumWeight(maxWeight); |
| 267 | + additionalInfo = ", maxWeight:" + maxWeight + " from quick estimation"; |
| 268 | + } |
| 269 | + |
| 270 | + if (logDetails) |
| 271 | + LOGGER.info("init landmarks for subnetworks with node count greater than " + minimumNodes + " with factor:" + factor + additionalInfo); |
280 | 272 |
|
281 | 273 | int nodes = 0;
|
282 | 274 | for (IntArrayList subnetworkIds : graphComponents) {
|
283 | 275 | nodes += subnetworkIds.size();
|
284 | 276 | if (subnetworkIds.size() < minimumNodes)
|
285 | 277 | continue;
|
| 278 | + if (factor <= 0) |
| 279 | + throw new IllegalStateException("factor wasn't initialized " + factor + ", subnetworks:" |
| 280 | + + graphComponents.size() + ", minimumNodes:" + minimumNodes + ", current size:" + subnetworkIds.size()); |
286 | 281 |
|
287 | 282 | int index = subnetworkIds.size() - 1;
|
288 | 283 | // ensure start node is reachable from both sides and no subnetwork is associated
|
@@ -335,6 +330,56 @@ public void createLandmarks() {
|
335 | 330 | initialized = true;
|
336 | 331 | }
|
337 | 332 |
|
    /**
     * Estimates the maximum weight occurring in the graph by exploring each sufficiently large
     * subnetwork from a single "test landmark". The caller uses this estimate to derive the
     * 'factor' that compresses weights into shorts.
     *
     * @param requireBothDirExplorer explorer that only follows edges traversable in both directions,
     *                               used to pick a start node that is reachable both ways
     * @param graphComponents        the subnetworks (node id lists) found by the connectivity analysis
     * @param blockedEdges           edge ids excluded from the exploration
     * @return the largest weight found, inflated by 1% as a safety margin
     * @throws IllegalStateException if at least one subnetwork was searched but no weight was found
     */
    private double estimateMaxWeight(EdgeExplorer requireBothDirExplorer, List<IntArrayList> graphComponents, IntHashSet blockedEdges) {
        double maxWeight = 0;
        int searchedSubnetworks = 0;
        for (IntArrayList subnetworkIds : graphComponents) {
            // subnetworks below the threshold get no landmarks, so they do not influence the factor
            if (subnetworkIds.size() < minimumNodes)
                continue;

            searchedSubnetworks++;
            int index = subnetworkIds.size() - 1;
            SUBNETWORK:
            for (; index >= 0; index--) {
                int nextStartNode = subnetworkIds.get(index);
                // only start from a node that has at least one edge usable in both directions
                if (GHUtility.count(requireBothDirExplorer.setBaseNode(nextStartNode)) > 0) {
                    Weighting initWeighting = lmSelectionWeighting;
                    int startNode = nextStartNode;

                    for (int i = 0; i < landmarks; i++) {
                        // search potential landmark
                        LandmarkExplorer explorer = new LandmarkExplorer(graph, this, initWeighting, traversalMode, true);
                        explorer.setStartNode(startNode);
                        explorer.setFilter(blockedEdges, true, true);
                        explorer.runAlgo();
                        if (explorer.getFromCount() < minimumNodes)
                            continue;

                        // from landmark we can explore the graph and get the max weight
                        // note: the assignment also advances startNode to the farthest node found so far
                        int potentialLandmarkNodeId = startNode = explorer.getLastEntry().adjNode;
                        explorer = new LandmarkExplorer(graph, this, weighting, traversalMode, true);
                        explorer.setStartNode(potentialLandmarkNodeId);
                        explorer.setFilter(blockedEdges, true, true);
                        explorer.runAlgo();
                        maxWeight = Math.max(maxWeight, explorer.getLastEntry().weight);
                        // one successful estimate per subnetwork is enough; leave the node loop entirely
                        break SUBNETWORK;
                    }
                }
            }
        }

        if (maxWeight <= 0 && searchedSubnetworks > 0)
            throw new IllegalStateException("max weight wasn't set although " + searchedSubnetworks + " subnetworks were searched (total " + graphComponents.size() + "), minimumNodes:" + minimumNodes);

        // we have to increase maxWeight slightly as it is only an approximation towards the maximum weight
        // TODO NOW but why is this necessary when we loop through all the landmarks and only those weights are used? We shouldn't need to determine All-to-All
        // a value too small <= 1.002 or too high >= 1.05 will let GraphHopperIT.testImportThenLoadLM fail
        return maxWeight * 1.01;
    }
| 382 | + |
338 | 383 | /**
|
339 | 384 | * This method creates landmarks for the specified subnetwork (integer list)
|
340 | 385 | *
|
@@ -370,40 +415,14 @@ private boolean createLandmarksForSubnetwork(final int startNode, final byte[] s
|
370 | 415 | }
|
371 | 416 |
|
372 | 417 | if (pickedPrecalculatedLandmarks) {
|
373 |
| - LOGGER.info("Picked " + tmpLandmarkNodeIds.length + " landmark suggestions, skipped expensive landmark determination"); |
| 418 | + LOGGER.info("Picked " + tmpLandmarkNodeIds.length + " landmark suggestions, skip finding landmarks"); |
374 | 419 | } else {
|
375 |
| - // 1a) pick landmarks via special weighting for a better geographical spreading |
376 |
| - Weighting initWeighting = lmSelectionWeighting; |
377 |
| - LandmarkExplorer explorer = new LandmarkExplorer(graph, this, initWeighting, traversalMode, true); |
378 |
| - explorer.setStartNode(startNode); |
379 |
| - explorer.setFilter(blockedEdges, true, true); |
380 |
| - explorer.runAlgo(); |
381 |
| - |
| 420 | + LandmarkExplorer explorer = findLandmarks(tmpLandmarkNodeIds, startNode, blockedEdges, logOffset); |
382 | 421 | if (explorer.getFromCount() < minimumNodes) {
|
383 | 422 | // too small subnetworks are initialized with special id==0
|
384 | 423 | explorer.setSubnetworks(subnetworks, UNCLEAR_SUBNETWORK);
|
385 | 424 | return false;
|
386 | 425 | }
|
387 |
| - |
388 |
| - // 1b) we have one landmark, now determine the other landmarks |
389 |
| - tmpLandmarkNodeIds[0] = explorer.getLastNode(); |
390 |
| - for (int lmIdx = 0; lmIdx < tmpLandmarkNodeIds.length - 1; lmIdx++) { |
391 |
| - if (Thread.currentThread().isInterrupted()) { |
392 |
| - throw new RuntimeException("Thread was interrupted"); |
393 |
| - } |
394 |
| - explorer = new LandmarkExplorer(graph, this, initWeighting, traversalMode, true); |
395 |
| - explorer.setFilter(blockedEdges, true, true); |
396 |
| - // set all current landmarks as start so that the next getLastNode is hopefully a "far away" node |
397 |
| - for (int j = 0; j < lmIdx + 1; j++) { |
398 |
| - explorer.setStartNode(tmpLandmarkNodeIds[j]); |
399 |
| - } |
400 |
| - explorer.runAlgo(); |
401 |
| - tmpLandmarkNodeIds[lmIdx + 1] = explorer.getLastNode(); |
402 |
| - if (logDetails && lmIdx % logOffset == 0) |
403 |
| - LOGGER.info("Finding landmarks [" + lmConfig + "] in network [" + explorer.getVisitedNodes() + "]. " |
404 |
| - + "Progress " + (int) (100.0 * lmIdx / tmpLandmarkNodeIds.length) + "%, " + Helper.getMemInfo()); |
405 |
| - } |
406 |
| - |
407 | 426 | if (logDetails)
|
408 | 427 | LOGGER.info("Finished searching landmarks for subnetwork " + subnetworkId + " of size " + explorer.getVisitedNodes());
|
409 | 428 | }
|
@@ -521,7 +540,7 @@ int getToWeight(int landmarkIndex, int node) {
|
521 | 540 | final boolean setWeight(long pointer, double value) {
|
522 | 541 | double tmpVal = value / factor;
|
523 | 542 | if (tmpVal > Integer.MAX_VALUE)
|
524 |
| - throw new UnsupportedOperationException("Cannot store infinity explicitely, pointer=" + pointer + ", value: " + value); |
| 543 | + throw new UnsupportedOperationException("Cannot store infinity explicitly, pointer=" + pointer + ", value=" + value + ", factor=" + factor); |
525 | 544 |
|
526 | 545 | if (tmpVal >= SHORT_MAX) {
|
527 | 546 | landmarkWeightDA.setShort(pointer, (short) SHORT_MAX);
|
@@ -551,7 +570,8 @@ boolean chooseActiveLandmarks(int fromNode, int toNode, int[] activeLandmarkIndi
|
551 | 570 | if (subnetworkFrom <= UNCLEAR_SUBNETWORK || subnetworkTo <= UNCLEAR_SUBNETWORK)
|
552 | 571 | return false;
|
553 | 572 | if (subnetworkFrom != subnetworkTo) {
|
554 |
| - throw new ConnectionNotFoundException("Connection between locations not found. Different subnetworks " + subnetworkFrom + " vs. " + subnetworkTo, new HashMap<String, Object>()); |
| 573 | + throw new ConnectionNotFoundException("Connection between locations not found. Different subnetworks " + subnetworkFrom |
| 574 | + + " vs. " + subnetworkTo, new HashMap<String, Object>()); |
555 | 575 | }
|
556 | 576 |
|
557 | 577 | int[] tmpIDs = landmarkIDs.get(subnetworkFrom);
|
@@ -714,15 +734,46 @@ int getBaseNodes() {
|
714 | 734 | return graph.getNodes();
|
715 | 735 | }
|
716 | 736 |
|
| 737 | + private LandmarkExplorer findLandmarks(int[] landmarkNodeIdsToReturn, int startNode, IntHashSet blockedEdges, int logOffset) { |
| 738 | + // 1a) pick landmarks via special weighting for a better geographical spreading |
| 739 | + Weighting initWeighting = lmSelectionWeighting; |
| 740 | + LandmarkExplorer explorer = new LandmarkExplorer(graph, this, initWeighting, traversalMode, true); |
| 741 | + explorer.setStartNode(startNode); |
| 742 | + explorer.setFilter(blockedEdges, true, true); |
| 743 | + explorer.runAlgo(); |
| 744 | + |
| 745 | + if (explorer.getFromCount() >= minimumNodes) { |
| 746 | + // 1b) we have one landmark, now determine the other landmarks |
| 747 | + landmarkNodeIdsToReturn[0] = explorer.getLastEntry().adjNode; |
| 748 | + for (int lmIdx = 0; lmIdx < landmarkNodeIdsToReturn.length - 1; lmIdx++) { |
| 749 | + if (Thread.currentThread().isInterrupted()) { |
| 750 | + throw new RuntimeException("Thread was interrupted"); |
| 751 | + } |
| 752 | + explorer = new LandmarkExplorer(graph, this, initWeighting, traversalMode, true); |
| 753 | + explorer.setFilter(blockedEdges, true, true); |
| 754 | + // set all current landmarks as start so that the next getLastNode is hopefully a "far away" node |
| 755 | + for (int j = 0; j < lmIdx + 1; j++) { |
| 756 | + explorer.setStartNode(landmarkNodeIdsToReturn[j]); |
| 757 | + } |
| 758 | + explorer.runAlgo(); |
| 759 | + landmarkNodeIdsToReturn[lmIdx + 1] = explorer.getLastEntry().adjNode; |
| 760 | + if (logDetails && lmIdx % logOffset == 0) |
| 761 | + LOGGER.info("Finding landmarks [" + lmConfig + "] in network [" + explorer.getVisitedNodes() + "]. " |
| 762 | + + "Progress " + (int) (100.0 * lmIdx / landmarkNodeIdsToReturn.length) + "%, " + Helper.getMemInfo()); |
| 763 | + } |
| 764 | + } |
| 765 | + return explorer; |
| 766 | + } |
| 767 | + |
717 | 768 | /**
|
718 | 769 | * This class is used to calculate landmark location (equally distributed).
|
719 | 770 | * It derives from DijkstraBidirectionRef, but is only used as forward or backward search.
|
720 | 771 | */
|
721 | 772 | private static class LandmarkExplorer extends DijkstraBidirectionRef {
|
722 |
| - private int lastNode; |
723 | 773 | // todo: rename 'from' to 'reverse' (and flip it) ? 'from' is used in many places for node ids and 'reverse' is mostly used for the direction
|
724 | 774 | private boolean from;
|
725 | 775 | private final LandmarkStorage lms;
|
| 776 | + private SPTEntry lastEntry; |
726 | 777 |
|
727 | 778 | public LandmarkExplorer(Graph g, LandmarkStorage lms, Weighting weighting, TraversalMode tMode, boolean from) {
|
728 | 779 | super(g, weighting, tMode);
|
@@ -754,25 +805,23 @@ int getFromCount() {
|
754 | 805 | return bestWeightMapFrom.size();
|
755 | 806 | }
|
756 | 807 |
|
757 |
| - int getToCount() { |
758 |
| - return bestWeightMapTo.size(); |
759 |
| - } |
760 |
| - |
761 |
| - public int getLastNode() { |
762 |
| - return lastNode; |
763 |
| - } |
764 |
| - |
    /**
     * Runs the bidirectional search. NOTE(review): presumably this override only widens access
     * to a protected base-class method so the enclosing storage code can invoke it — confirm
     * against DijkstraBidirectionRef.
     */
    public void runAlgo() {
        super.runAlgo();
    }
|
768 | 811 |
|
| 812 | + SPTEntry getLastEntry() { |
| 813 | + if (!finished()) |
| 814 | + throw new IllegalStateException("Cannot get max weight if not yet finished"); |
| 815 | + return lastEntry; |
| 816 | + } |
| 817 | + |
769 | 818 | @Override
|
770 | 819 | public boolean finished() {
|
771 | 820 | if (from) {
|
772 |
| - lastNode = currFrom.adjNode; |
| 821 | + lastEntry = currFrom; |
773 | 822 | return finishedFrom;
|
774 | 823 | } else {
|
775 |
| - lastNode = currTo.adjNode; |
| 824 | + lastEntry = currTo; |
776 | 825 | return finishedTo;
|
777 | 826 | }
|
778 | 827 | }
|
|
0 commit comments