Commit d29555a

baumboikarussell authored and committed
Improvement of landmark weight precision, fixes graphhopper#1080 (graphhopper#1376)
* Changed how the weights are stored: landmark weights are no longer kept as two shorts (one for the forward and one for the backward value) but as 18 bits for the backward value and 14 bits for the difference between these two (a standalone sketch of this encoding follows just before the diff).
* Added some tests
* Update LandmarkStorage.java
* Update LandmarkStorageTest.java
* Updated LandmarkStorage.java
* dos2unix
* make it compile again
* revert changes from ammagamma
* avoid warnings in tests and do not limit factor to 0.1
* ensure correct version is used for landmark storage
* fix warning
1 parent 1eba0a6 commit d29555a

4 files changed: +206 −77 lines


core/src/main/java/com/graphhopper/routing/lm/LandmarkStorage.java

Lines changed: 110 additions & 64 deletions
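Before the diff itself, here is a compact, standalone sketch of the encoding this commit introduces. The constant names mirror those added in LandmarkStorage.java; the pack/unpack helpers are hypothetical and skip the saturation and infinity handling that the real setWeight/getFromWeight/getToWeight perform:

    // 32-bit row entry per (node, landmark): the low 18 bits hold the backward
    // ('from') weight, the high 14 bits hold the signed difference to the
    // forward ('to') weight.
    public class LmEncodingSketch {
        static final int FROM_WEIGHT_BITS = 18;
        static final int FROM_WEIGHT_INF = (1 << FROM_WEIGHT_BITS) - 1;  // 262143
        static final int DELTA_INF = (1 << (31 - FROM_WEIGHT_BITS)) - 1; // 8191

        // hypothetical helper: assumes both weights are finite and in range
        static int pack(int fromWeight, int toWeight) {
            int delta = toWeight - fromWeight; // must fit into 14 signed bits
            return (delta << FROM_WEIGHT_BITS) | fromWeight;
        }

        static int unpackFrom(int raw) {
            return raw & FROM_WEIGHT_INF; // mask off the delta bits
        }

        static int unpackTo(int raw) {
            // the arithmetic shift restores the sign of the stored delta
            return unpackFrom(raw) + (raw >> FROM_WEIGHT_BITS);
        }

        public static void main(String[] args) {
            int raw = pack(100_000, 99_200);     // delta = -800
            System.out.println(unpackFrom(raw)); // 100000
            System.out.println(unpackTo(raw));   // 99200
        }
    }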
@@ -61,8 +61,6 @@ public class LandmarkStorage implements Storable<LandmarkStorage> {
     // one node has an associated landmark information ('one landmark row'): the forward and backward weight
     private long LM_ROW_LENGTH;
     private int landmarks;
-    private final int FROM_OFFSET;
-    private final int TO_OFFSET;
     private final DataAccess landmarkWeightDA;
     /* every subnetwork has its own landmark mapping but the count of landmarks is always the same */
     private final List<int[]> landmarkIDs;
@@ -74,15 +72,11 @@ public class LandmarkStorage implements Storable<LandmarkStorage> {
     private Weighting lmSelectionWeighting;
     private final TraversalMode traversalMode;
     private boolean initialized;
-    private int minimumNodes = 500_000;
+    private int minimumNodes;
     private final SubnetworkStorage subnetworkStorage;
     private List<LandmarkSuggestion> landmarkSuggestions = Collections.emptyList();
     private SpatialRuleLookup ruleLookup;
     private boolean logDetails = false;
-    /**
-     * 'to' and 'from' fit into 32 bit => 16 bit for each of them => 65536
-     */
-    static final long PRECISION = 1 << 16;
 
     public LandmarkStorage(GraphHopperStorage graph, Directory dir, final Weighting weighting, int landmarks) {
         this.graph = graph;
@@ -117,12 +111,14 @@ public String toString() {
         this.landmarks = landmarks;
         // one short per landmark and two directions => 2*2 byte
         this.LM_ROW_LENGTH = landmarks * 4;
-        this.FROM_OFFSET = 0;
-        this.TO_OFFSET = 2;
         this.landmarkIDs = new ArrayList<>();
         this.subnetworkStorage = new SubnetworkStorage(dir, "landmarks_" + name);
     }
 
+    public int getVersion() {
+        return 1;
+    }
+
     /**
      * Specify the maximum possible value for your used area. With this maximum weight value you can influence the storage
      * precision for your weights that help A* finding its way to the goal. The same value is used for all subnetworks.
@@ -214,8 +210,8 @@ public void createLandmarks() {
         this.landmarkWeightDA.create(2000);
         this.landmarkWeightDA.ensureCapacity(maxBytes);
 
-        for (long pointer = 0; pointer < maxBytes; pointer += 2) {
-            landmarkWeightDA.setShort(pointer, (short) SHORT_INFINITY);
+        for (long pointer = 0; pointer < maxBytes; pointer += 4) {
+            landmarkWeightDA.setInt(pointer, (DELTA_INF << FROM_WEIGHT_BITS) | FROM_WEIGHT_INF);
         }
 
         String additionalInfo = "";
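As a sanity check one can work out the fill value here: (DELTA_INF << FROM_WEIGHT_BITS) | FROM_WEIGHT_INF = (8191 << 18) | 262143 = 0x7FFC0000 | 0x0003FFFF = 0x7FFFFFFF, i.e. Integer.MAX_VALUE — every 4-byte row entry starts out with both the backward weight and the delta marked as infinite.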
@@ -253,7 +249,7 @@ public void createLandmarks() {
         if (ruleLookup != null && ruleLookup.size() > 0) {
             StopWatch sw = new StopWatch().start();
             blockedEdges = findBorderEdgeIds(ruleLookup);
-            tarjanFilter = new BlockedEdgesFilter(encoder, false, true, blockedEdges);
+            tarjanFilter = new BlockedEdgesFilter(encoder, true, false, blockedEdges);
             if (logDetails)
                 LOGGER.info("Made " + blockedEdges.size() + " edges inaccessible. Calculated country cut in " + sw.stop().getSeconds() + "s, " + Helper.getMemInfo());
         }
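Note: the swapped booleans here (and at the setFilter call sites below) simply follow the new (fwd, bwd) parameter order of BlockedEdgesFilter and setFilter, shown in later hunks; the effective filter direction is unchanged.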
@@ -308,12 +304,14 @@ public void createLandmarks() {
             }
         }
 
-        landmarkWeightDA.setHeader(0 * 4, graph.getNodes());
+        // make backward incompatible to force a rebuild (pre 0.11 releases had the node count at 0)
+        landmarkWeightDA.setHeader(0 * 4, getVersion());
         landmarkWeightDA.setHeader(1 * 4, landmarks);
         landmarkWeightDA.setHeader(2 * 4, subnetworkCount);
         if (factor * DOUBLE_MLTPL > Integer.MAX_VALUE)
             throw new UnsupportedOperationException("landmark weight factor cannot be bigger than Integer.MAX_VALUE " + factor * DOUBLE_MLTPL);
         landmarkWeightDA.setHeader(3 * 4, (int) Math.round(factor * DOUBLE_MLTPL));
+        landmarkWeightDA.setHeader(4 * 4, graph.getNodes());
 
         // serialize fast byte[] into DataAccess
         subnetworkStorage.create(graph.getNodes());
@@ -407,9 +405,9 @@ private boolean createLandmarksForSubnetwork(final int startNode, final byte[] s
             int lmNodeId = tmpLandmarkNodeIds[lmIdx];
             LandmarkExplorer explorer = new LandmarkExplorer(graph, this, weighting, traversalMode, true);
             explorer.setStartNode(lmNodeId);
-            explorer.setFilter(blockedEdges, false, true);
+            explorer.setFilter(blockedEdges, true, false);
             explorer.runAlgo();
-            explorer.initLandmarkWeights(lmIdx, lmNodeId, LM_ROW_LENGTH, FROM_OFFSET);
+            explorer.initLandmarkWeights(lmIdx, lmNodeId, LM_ROW_LENGTH);
 
             // set subnetwork id to all explored nodes, but do this only for the first landmark
             if (lmIdx == 0) {
@@ -419,9 +417,9 @@ private boolean createLandmarksForSubnetwork(final int startNode, final byte[] s
 
             explorer = new LandmarkExplorer(graph, this, weighting, traversalMode, false);
             explorer.setStartNode(lmNodeId);
-            explorer.setFilter(blockedEdges, true, false);
+            explorer.setFilter(blockedEdges, false, true);
             explorer.runAlgo();
-            explorer.initLandmarkWeights(lmIdx, lmNodeId, LM_ROW_LENGTH, TO_OFFSET);
+            explorer.initLandmarkWeights(lmIdx, lmNodeId, LM_ROW_LENGTH);
 
             if (lmIdx == 0) {
                 if (explorer.setSubnetworks(subnetworks, subnetworkId))
@@ -479,58 +477,102 @@ protected IntHashSet findBorderEdgeIds(SpatialRuleLookup ruleLookup) {
      * a node ID but the internal index of the landmark array.
      */
     int getFromWeight(int landmarkIndex, int node) {
-        int res = (int) landmarkWeightDA.getShort((long) node * LM_ROW_LENGTH + landmarkIndex * 4 + FROM_OFFSET)
-                & 0x0000FFFF;
-        assert res >= 0 : "Negative to weight " + res + ", landmark index:" + landmarkIndex + ", node:" + node;
-        if (res == SHORT_INFINITY)
-            // TODO can happen if endstanding oneway
-            // we should set a 'from' value to SHORT_MAX if the 'to' value was already set to find real bugs
-            // and what to return? Integer.MAX_VALUE i.e. convert to Double.pos_infinity upstream?
-            return SHORT_MAX;
+        // only the right bits of this integer store the backward value
+        int res = landmarkWeightDA.getInt((long) node * LM_ROW_LENGTH + landmarkIndex * 4) & FROM_WEIGHT_INF;
+
+        if (res == FROM_WEIGHT_INF)
+            return Integer.MAX_VALUE;
         // throw new IllegalStateException("Do not call getFromWeight for wrong landmark[" + landmarkIndex + "]=" + landmarkIDs[landmarkIndex] + " and node " + node);
-        // TODO if(res == MAX) fallback to beeline approximation!?
 
+        assert res >= 0 : "Negative backward weight " + res + ", landmark index:" + landmarkIndex + ", node:" + node;
         return res;
     }
 
     /**
      * @return the weight from the specified node to the landmark (specified *as index*)
      */
     int getToWeight(int landmarkIndex, int node) {
-        int res = (int) landmarkWeightDA.getShort((long) node * LM_ROW_LENGTH + landmarkIndex * 4 + TO_OFFSET)
-                & 0x0000FFFF;
-        assert res >= 0 : "Negative to weight " + res + ", landmark index:" + landmarkIndex + ", node:" + node;
-        if (res == SHORT_INFINITY)
-            return SHORT_MAX;
-        // throw new IllegalStateException("Do not call getToWeight for wrong landmark[" + landmarkIndex + "]=" + landmarkIDs[landmarkIndex] + " and node " + node);
+        int res = landmarkWeightDA.getInt((long) node * LM_ROW_LENGTH + landmarkIndex * 4);
+
+        // the left bits of "res" store the difference between forward and backward value
+        int delta = res >> FROM_WEIGHT_BITS;
 
+        if (delta == DELTA_INF)
+            return Integer.MAX_VALUE;
+        // throw new IllegalStateException("Do not call getToWeight for wrong landmark[" + landmarkIndex + "]=" + landmarkIDs[landmarkIndex] + " and node " + node);
+
+        // the right bits of "res" store the backward value
+        int from = res & FROM_WEIGHT_INF;
+
+        if (from == FROM_WEIGHT_INF) {
+            from = DELTA_INF + 1;
+        }
+
+        // to get the forward value you have to add the delta to the backward value
+        res = from + delta;
+
+        assert res >= 0 : "Negative forward weight " + res + ", landmark index:" + landmarkIndex + ", node:" + node;
         return res;
     }
 
-    // Short.MAX_VALUE = 2^15-1 but we have unsigned short so we need 2^16-1
-    private static final int SHORT_INFINITY = Short.MAX_VALUE * 2 + 1;
-    // We have large values that do not fit into a short, use a specific maximum value
-    private static final int SHORT_MAX = SHORT_INFINITY - 1;
+    // 'to' and 'from' fit into 32 bit => 16 bit for each of them => 65536
+    static final long PRECISION = 1 << 16;
+    /* This value sets the number of bits used to store the backward weight.
+       The rest of the overall 32 bits stores the difference between forward and backward weight. */
+    private static final int FROM_WEIGHT_BITS = 18;
+    // The backward weight is unsigned --> 2^x - 1
+    private static final int FROM_WEIGHT_INF = (int) Math.pow(2, FROM_WEIGHT_BITS) - 1;
+    // This value will be used if the backward weight is too large
+    private static final int FROM_WEIGHT_MAX = FROM_WEIGHT_INF - 1;
+    /* The difference between forward and backward weight is signed
+       --> 2^(31-x) - 1 instead of 2^(32-x) - 1 */
+    private static final int DELTA_INF = (int) Math.pow(2, 31 - FROM_WEIGHT_BITS) - 1;
+    // This value will be used if the difference between these weights is too large and forward > backward
+    private static final int DELTA_MAX = DELTA_INF - 1;
+    // This value will be used if the difference between these weights is too large and forward < backward
+    private static final int DELTA_MIN = -DELTA_INF - 1;
 
     /**
-     * @return false if the value capacity was reached and instead of the real value the SHORT_MAX was stored.
+     * @return false if the value capacity was reached and instead of the real value the MAX was stored.
      */
-    final boolean setWeight(long pointer, double value) {
+    final boolean setWeight(int lmIdx, int nodeId, long rowSize, double value, boolean from) {
         double tmpVal = value / factor;
         if (tmpVal > Integer.MAX_VALUE)
-            throw new UnsupportedOperationException("Cannot store infinity explicitely, pointer=" + pointer + ", value: " + value);
+            throw new UnsupportedOperationException("Cannot store infinity explicitly, landmark: " + lmIdx + ", node: " + nodeId + ", value: " + value);
 
-        if (tmpVal >= SHORT_MAX) {
-            landmarkWeightDA.setShort(pointer, (short) SHORT_MAX);
-            return false;
+        if (from) {
+            if (tmpVal >= FROM_WEIGHT_MAX) {
+                landmarkWeightDA.setInt(nodeId * rowSize + lmIdx * 4, (DELTA_INF << FROM_WEIGHT_BITS) | FROM_WEIGHT_MAX);
+                return false;
+            } else {
+                landmarkWeightDA.setInt(nodeId * rowSize + lmIdx * 4, (DELTA_INF << FROM_WEIGHT_BITS) | (int) tmpVal);
+                return true;
+            }
         } else {
-            landmarkWeightDA.setShort(pointer, (short) tmpVal);
-            return true;
+            int fromWeight = getFromWeight(lmIdx, nodeId);
+            int delta;
+            if (fromWeight == Integer.MAX_VALUE) {
+                fromWeight = FROM_WEIGHT_INF;
+                delta = (int) tmpVal - DELTA_INF + 1;
+            } else {
+                delta = (int) tmpVal - fromWeight;
+            }
+
+            if (delta >= DELTA_MAX) {
+                landmarkWeightDA.setInt(nodeId * rowSize + lmIdx * 4, (DELTA_MAX << FROM_WEIGHT_BITS) | fromWeight);
+                return false;
+            } else if (delta <= DELTA_MIN) {
+                landmarkWeightDA.setInt(nodeId * rowSize + lmIdx * 4, (DELTA_MIN << FROM_WEIGHT_BITS) | fromWeight);
+                return false;
+            } else {
+                landmarkWeightDA.setInt(nodeId * rowSize + lmIdx * 4, (delta << FROM_WEIGHT_BITS) | fromWeight);
+                return true;
+            }
         }
     }
 
     boolean isInfinity(long pointer) {
-        return ((int) landmarkWeightDA.getShort(pointer) & 0x0000FFFF) == SHORT_INFINITY;
+        return (landmarkWeightDA.getInt(pointer) & FROM_WEIGHT_INF) == FROM_WEIGHT_INF;
     }
 
     int calcWeight(EdgeIteratorState edge, boolean reverse) {
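Worked out numerically from these constants: FROM_WEIGHT_INF = 2^18 − 1 = 262143 and FROM_WEIGHT_MAX = 262142, so the scaled backward weight may now reach 262141 before it saturates, where the old unsigned-short scheme capped both directions at SHORT_MAX = 65534 — roughly four times the range at the same factor. The price is the narrower delta: DELTA_INF = 2^13 − 1 = 8191 with DELTA_MAX = 8190 and DELTA_MIN = −8192, so whenever forward and backward weight differ by more than about 8190 scaled units the delta is clamped, setWeight returns false, and the maxed-out counter in initLandmarkWeights below is incremented.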
@@ -661,9 +703,13 @@ public boolean loadExisting() {
             if (!subnetworkStorage.loadExisting())
                 throw new IllegalStateException("landmark weights loaded but not the subnetworks!?");
 
-            int nodes = landmarkWeightDA.getHeader(0 * 4);
+            int version = landmarkWeightDA.getHeader(0 * 4);
+            if (version != getVersion())
+                throw new IllegalArgumentException("Cannot load landmark data due to incompatible version. Storage used version: " + version + ", expected: " + getVersion());
+            int nodes = landmarkWeightDA.getHeader(4 * 4);
             if (nodes != graph.getNodes())
                 throw new IllegalArgumentException("Cannot load landmark data as written for different graph storage with " + nodes + " nodes, not " + graph.getNodes());
+
             landmarks = landmarkWeightDA.getHeader(1 * 4);
             int subnetworks = landmarkWeightDA.getHeader(2 * 4);
             factor = landmarkWeightDA.getHeader(3 * 4) / DOUBLE_MLTPL;
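Read together with the header hunk in createLandmarks above, the header ints are now: 0 = storage format version (getVersion(); pre-0.11 files stored the node count here, so old files fail this check and are rebuilt), 1 = landmark count, 2 = subnetwork count, 3 = factor * DOUBLE_MLTPL, 4 = node count.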
@@ -728,7 +774,7 @@ public LandmarkExplorer(Graph g, LandmarkStorage lms, Weighting weighting, Trave
             super(g, weighting, tMode);
             this.lms = lms;
             this.from = from;
-            // set one of the bi directions as already finished
+            // set one of the bi directions as already finished
             if (from)
                 finishedTo = true;
             else
@@ -737,8 +783,15 @@ public LandmarkExplorer(Graph g, LandmarkStorage lms, Weighting weighting, Trave
             setUpdateBestPath(false);
         }
 
-        public void setFilter(IntHashSet set, boolean bwd, boolean fwd) {
-            EdgeFilter ef = new BlockedEdgesFilter(flagEncoder, bwd, fwd, set);
+        public void setStartNode(int startNode) {
+            if (from)
+                initFrom(startNode, 0);
+            else
+                initTo(startNode, 0);
+        }
+
+        void setFilter(IntHashSet set, boolean fwd, boolean bwd) {
+            EdgeFilter ef = new BlockedEdgesFilter(flagEncoder, fwd, bwd, set);
             outEdgeExplorer = graph.createEdgeExplorer(ef);
             inEdgeExplorer = graph.createEdgeExplorer(ef);
         }
@@ -755,13 +808,6 @@ public int getLastNode() {
             return lastNode;
         }
 
-        public void setStartNode(int startNode) {
-            if (from)
-                initFrom(startNode, 0);
-            else
-                initTo(startNode, 0);
-        }
-
         public void runAlgo() {
             super.runAlgo();
         }
@@ -777,7 +823,7 @@ public boolean finished() {
             }
         }
 
-        public boolean setSubnetworks(final byte[] subnetworks, final int subnetworkId) {
+        boolean setSubnetworks(final byte[] subnetworks, final int subnetworkId) {
             if (subnetworkId > 127)
                 throw new IllegalStateException("Too many subnetworks " + subnetworkId);
 
@@ -805,15 +851,15 @@ public boolean apply(int nodeId, SPTEntry value) {
             return failed.get();
         }
 
-        public void initLandmarkWeights(final int lmIdx, int lmNodeId, final long rowSize, final int offset) {
+        public void initLandmarkWeights(final int lmIdx, int lmNodeId, final long rowSize) {
             IntObjectMap<SPTEntry> map = from ? bestWeightMapFrom : bestWeightMapTo;
             final AtomicInteger maxedout = new AtomicInteger(0);
             final Map.Entry<Double, Double> finalMaxWeight = new MapEntry<>(0d, 0d);
 
             map.forEach(new IntObjectProcedure<SPTEntry>() {
                 @Override
                 public void apply(int nodeId, SPTEntry b) {
-                    if (!lms.setWeight(nodeId * rowSize + lmIdx * 4 + offset, b.weight)) {
+                    if (!lms.setWeight(lmIdx, nodeId, rowSize, b.weight, from)) {
                         maxedout.incrementAndGet();
                         finalMaxWeight.setValue(Math.max(b.weight, finalMaxWeight.getValue()));
                     }
@@ -822,8 +868,8 @@ public void apply(int nodeId, SPTEntry b) {
 
             if ((double) maxedout.get() / map.size() > 0.1) {
                 LOGGER.warn("landmark " + lmIdx + " (" + nodeAccess.getLatitude(lmNodeId) + "," + nodeAccess.getLongitude(lmNodeId) + "): " +
-                        "too many weights were maxed out (" + maxedout.get() + "/" + map.size() + "). Use a bigger factor than " + lms.factor
-                        + ". For example use the following in the config.yml: weighting=" + weighting.getName() + "|maximum=" + finalMaxWeight.getValue() * 1.2);
+                        "too many " + (from ? "backward" : "delta") + " weights were maxed out (" + maxedout.get() + "/" + map.size() + "). Factor is too small " + lms.factor
+                        + ". To fix this increase maximum in config.yml: prepare.lm.weighting: " + weighting.getName() + "|maximum=" + finalMaxWeight.getValue() * 1.2);
             }
         }
     }
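When this warning fires, the remedy it suggests would look like the following line in config.yml — the weighting name and the maximum here are placeholder values to be replaced by those printed in the log message:

    prepare.lm.weighting: fastest|maximum=12000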
@@ -838,7 +884,7 @@ public int compare(Map.Entry<Integer, Integer> o1, Map.Entry<Integer, Integer> o
         }
     };
 
-    static GHPoint createPoint(Graph graph, int nodeId) {
+    private static GHPoint createPoint(Graph graph, int nodeId) {
        return new GHPoint(graph.getNodeAccess().getLatitude(nodeId), graph.getNodeAccess().getLongitude(nodeId));
     }
 
@@ -862,7 +908,7 @@ private static class BlockedEdgesFilter implements EdgeFilter {
         private final boolean fwd;
         private final boolean bwd;
 
-        public BlockedEdgesFilter(FlagEncoder encoder, boolean bwd, boolean fwd, IntHashSet blockedEdges) {
+        public BlockedEdgesFilter(FlagEncoder encoder, boolean fwd, boolean bwd, IntHashSet blockedEdges) {
             this.encoder = encoder;
             this.bwd = bwd;
             this.fwd = fwd;
@@ -888,4 +934,4 @@ public String toString() {
             return encoder.toString() + ", bwd:" + bwd + ", fwd:" + fwd;
         }
     }
-}
+}
