Skip to content

Commit

Permalink
Issue 106: SVM improve sample-spacing computation
Browse files Browse the repository at this point in the history
  • Loading branch information
gwlucastrig committed Jan 20, 2024
1 parent 7e660b0 commit 4949221
Show file tree
Hide file tree
Showing 7 changed files with 321 additions and 89 deletions.
42 changes: 38 additions & 4 deletions core/src/main/java/org/tinfour/common/Vertex.java
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,12 @@ public class Vertex implements ISamplePoint {
*/
public static final int BIT_CONSTRAINT = 0x02;

/**
* A bit flag indicating that the vertex is to be treated as "withheld" and
* should not be processed as part of a Delaunay triangulation (TIN).
* This flag is used in support of data filtering and similar operations.
*/
public static final int BIT_WITHHELD = 0x04;
/**
* An indexing value assigned to the Vertex. In this package, it is used
* primarily for diagnostic purposes and labeling graphics.
Expand Down Expand Up @@ -365,6 +371,34 @@ public void setConstraintMember(boolean constraintMember) {
}
}



/**
 * Indicates whether this vertex carries the withheld marking. The
 * marking is normally applied by application code or filtering
 * utilities rather than by Tinfour's internal operations.
 *
 * @return true if vertex is withheld; otherwise, false
 */
public boolean isWithheld() {
  return (status & BIT_WITHHELD) == BIT_WITHHELD;
}

/**
 * Sets or clears the is-withheld status of a vertex. A withheld vertex
 * should be excluded from Delaunay triangulation (TIN) processing; this
 * status is intended for use by data-filtering utilities.
 *
 * @param withheld true if the vertex is to be withheld; otherwise, false
 */
public void setWithheld(boolean withheld) {
  // Fix: parameter was misnamed "synthetic", an apparent copy-paste
  // from the sibling setSynthetic method; the @param tag repeated the
  // wrong name. Renaming a parameter does not affect callers in Java.
  if (withheld) {
    status |= BIT_WITHHELD;
  } else {
    status &= ~BIT_WITHHELD;
  }
}



/**
* Sets the status value of the vertex. This method is intended to
* provide an efficient way of setting multiple status flags at once.
Expand All @@ -387,7 +421,7 @@ public int getStatus() {

/**
* Indicates whether a vertex is part of a constraint definition or
* lies on the border of an area constraint.
* lies on the border of an area constraint.
*
* @return true if vertex is a constraint member; otherwise, false
*/
Expand All @@ -396,7 +430,7 @@ public boolean isConstraintMember() {
}

/**
* Gets the auxiliary index for the vertex. The auxiliary index
* Gets the auxiliary index for the vertex. The auxiliary index
* field is one byte in size and supports integer values in the
* range 0 to 255. It is used to support graph-coloring algorithms
* but is available for other uses as well.
Expand All @@ -407,10 +441,10 @@ public int getAuxiliaryIndex() {
}

/**
* Sets the auxiliary index for the vertex. The auxiliary index
* Sets the auxiliary index for the vertex. The auxiliary index
* field is one byte in size and supports integer values in the
* range 0 to 255. It is used to support graph-coloring algorithms
* but is available for other uses as well.
* but is available for other uses as well.
* @param auxiliaryIndex a value in the range 0 to 255
*/
public void setAuxiliaryIndex(int auxiliaryIndex) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -778,6 +778,9 @@ private void buildRegions() {
buildRegionsUsingPerimeter();

for (Contour contour : closedContourList) {
if(contour.isEmpty()){
continue;
}
ContourRegion region = new ContourRegion(contour);
regionList.add(region);
}
Expand Down
6 changes: 6 additions & 0 deletions svm/src/main/java/org/tinfour/svm/SvmBathymetryData.java
Original file line number Diff line number Diff line change
Expand Up @@ -655,4 +655,10 @@ public void setSurveyPerimeter(List<Vertex>perimeterVertices){
/**
 * Gets the survey-perimeter vertices previously registered via
 * setSurveyPerimeter.
 * <p>
 * NOTE(review): this returns the internal list reference, so callers
 * can mutate the stored perimeter — confirm whether a defensive copy
 * is warranted.
 *
 * @return the list of survey-perimeter vertices
 */
public List<Vertex>getSurveyPerimeter(){
return surveyPerimeter;
}

/**
 * Replaces the current soundings with the specified collection,
 * typically after a filtering operation has removed anomalous points.
 * Note that the supplemental samples are cleared and are NOT
 * repopulated; only the main soundings list receives the replacements.
 * Package-private: intended for use by SVM-internal processing only.
 *
 * @param replacements the vertices that become the new soundings
 */
void replaceSoundings(List<Vertex>replacements){
soundings.clear();
supplement.clear();
soundings.addAll(replacements);
}
}
188 changes: 124 additions & 64 deletions svm/src/main/java/org/tinfour/svm/SvmComputation.java
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;
import java.util.function.Consumer;
import org.tinfour.common.GeometricOperations;
Expand Down Expand Up @@ -270,6 +269,24 @@ int getFlatTriangleCount() {
}
}

/**
 * An immutable holder for summary statistics describing the spacing
 * (edge lengths) between neighboring samples in the TIN.
 */
private static class SampleSpacing{
// number of edge lengths included in the statistics
final int nSamples;
// sample standard deviation of the lengths (may be NaN if too few samples)
final double sigma;
// arithmetic mean of the lengths
final double mean;
// median of the lengths
final double median;
// minimum observed length
final double lenMin;
// maximum observed length
final double lenMax;

/**
 * Constructs an instance populated with the computed statistics.
 * Note the parameter order (mean before sigma) differs from the
 * field declaration order; keep call sites consistent.
 *
 * @param nSamples number of edge lengths measured
 * @param mean arithmetic mean of the lengths
 * @param sigma sample standard deviation (NaN if not computable)
 * @param median median length
 * @param lenMin minimum length
 * @param lenMax maximum length
 */
SampleSpacing(int nSamples, double mean, double sigma, double median, double lenMin, double lenMax){
this.nSamples = nSamples;
this.mean = mean;
this.sigma = sigma;
this.median = median;
this.lenMin = lenMin;
this.lenMax = lenMax;
}
}

/**
* Performs the main process, printing the results to the specified print
* stream.
Expand Down Expand Up @@ -335,6 +352,34 @@ public void processVolume(
long timeToBuildTin = time1 - time0;
long timeToAddConstraints = time2 - time1;

long spTime0 = System.nanoTime();
SampleSpacing spacing = this.evaluateSampleSpacing(tin);
long spTime1 = System.nanoTime();
long timeToFindSampleSpacing = spTime1-spTime0;

// The experimental filter is a non-advertised feature for removing
// anomalous points.
if(properties.isExperimentalFilterEnabled()){
ps.println("Processing experimental filter");
spTime0 = System.nanoTime();
SvmSinglePointAnomalyFilter filter = new SvmSinglePointAnomalyFilter();
int nFilter = filter.process(ps, tin);
if(nFilter>0){
// some vertices were marked as withheld
ArrayList<Vertex> filteredSamples = new ArrayList<>(soundings.size());
for(Vertex v: soundings){
if(!v.isWithheld()){
filteredSamples.add(v);
}
}
soundings = filteredSamples;
data.replaceSoundings(filteredSamples);
spTime1 = System.nanoTime();
long timeToFilter = spTime1-spTime0;
ps.format("Time for experimental filter %9.1f ms%n", timeToFilter/1.0e+6);
}
}

TriangleSurvey trigSurvey = new TriangleSurvey(tin, shoreReferenceElevation);
TriangleCollector.visitSimpleTriangles(tin, trigSurvey);

Expand Down Expand Up @@ -468,46 +513,14 @@ public void processVolume(
data.getMaxZ() / lengthFactor,
(data.getMaxZ() - data.getMinZ()) / lengthFactor);

if (properties.isSoundingSpacingEnabled()) {
KahanSummation sumLen = new KahanSummation();
KahanSummation sumLen2 = new KahanSummation();
int nEdgeMax = tin.getMaximumEdgeAllocationIndex();
BitSet visited = new BitSet(nEdgeMax);
float[] lenArray = new float[nEdgeMax];
int nLen = 0;
for(IQuadEdge edge: tin.edges()){
int eIndex = edge.getBaseIndex();
if(visited.get(eIndex)){
continue;
}
visited.set(eIndex);
if(edge.isConstrainedRegionBorder()){
continue;
}

if(lakeConsumer.isWater(edge)){
double len = edge.getLength();
sumLen.add(len);
sumLen2.add(len*len);
lenArray[nLen++] = (float)len;
}
}

Arrays.sort(lenArray, 0, nLen);
double sLen = sumLen.getSum();
double sLen2 = sumLen2.getSum();
double sigma = Double.NaN;
double meanLen = sumLen.getMean();
double medianLen = lenArray[nLen / 2];
if(nLen>2){
sigma = Math.sqrt((sLen2 - (sLen / nLen) * sLen) / (nLen - 1));
}
ps.format(" Sounding spacing%n");
ps.format(" mean %12.3f %s%n", meanLen / lengthFactor, lengthUnits);
ps.format(" std dev %12.3f %s%n", sigma/lengthFactor, lengthUnits);
ps.format(" median %12.3f %s%n", medianLen / lengthFactor, lengthUnits);
ps.format("%n");
}
ps.format(" Sounding spacing%n");
ps.format(" mean %12.3f %s%n", spacing.mean / lengthFactor, lengthUnits);
ps.format(" std dev %12.3f %s%n", spacing.sigma / lengthFactor, lengthUnits);
ps.format(" median %12.3f %s%n", spacing.median/ lengthFactor, lengthUnits);
ps.format(" maximim %12.3f %s%n", spacing.lenMax/ lengthFactor, lengthUnits);
ps.format(" minimum %14.5f %s%n", spacing.lenMin/ lengthFactor, lengthUnits);
ps.format("%n");

double rawVolume = lakeConsumer.getVolume();
double rawSurfArea = lakeConsumer.getSurfaceArea();
Expand All @@ -518,7 +531,6 @@ public void processVolume(
double surfArea = lakeConsumer.getSurfaceArea() / areaFactor;
double avgDepth = (rawVolume / rawSurfArea) / lengthFactor;
double adjMeanDepth = rawAdjMeanDepth / lengthFactor;
double vertexSpacing = estimateInteriorVertexSpacing(tin, lakeConsumer);
double rawFlatArea = lakeConsumer.getFlatArea();
double flatArea = lakeConsumer.getFlatArea() / areaFactor;

Expand All @@ -533,7 +545,7 @@ public void processVolume(
flatArea, areaUnits, rawFlatArea, lengthUnits);
ps.format(" Avg depth %18.2f %s%n", avgDepth, lengthUnits);
ps.format(" Adj mean depth %18.2f %s%n", adjMeanDepth, lengthUnits);
ps.format(" Mean Vertex Spacing %18.2f %s%n", vertexSpacing, lengthUnits);
ps.format(" Mean Vertex Spacing %18.2f %s%n", spacing.mean, lengthUnits);
} else {
ps.format(" Volume %,18.2f %s %,28.1f %s^3%n",
volume, volumeUnits, rawVolume, lengthUnits);
Expand All @@ -543,7 +555,7 @@ public void processVolume(
flatArea, areaUnits, rawFlatArea, lengthUnits);
ps.format(" Avg depth %,18.2f %s%n", avgDepth, lengthUnits);
ps.format(" Adj mean depth %,18.2f %s%n", adjMeanDepth, lengthUnits);
ps.format(" Mean Vertex Spacing %,18.2f %s%n", vertexSpacing, lengthUnits);
ps.format(" Mean Vertex Spacing %,18.2f %s%n", spacing.mean, lengthUnits);
}
ps.format(" N Triangles %15d%n", lakeConsumer.nTriangles);
ps.format(" N Flat Triangles %15d%n", lakeConsumer.nFlatTriangles);
Expand All @@ -563,6 +575,7 @@ public void processVolume(
ps.format("Time to load data %9.1f ms%n", data.getTimeToLoadData() / 1.0e+6);
ps.format("Time to build TIN %9.1f ms%n", timeToBuildTin / 1.0e+6);
ps.format("Time to add shore constraint %9.1f ms%n", timeToAddConstraints / 1.0e+6);
ps.format("Time to find sample spacing %9.1f ms%n", timeToFindSampleSpacing/1.0e+6);
ps.format("Time to remedy flat triangles %9.1f ms%n", timeToFixFlats / 1.0e+6);
ps.format("Time to compute lake volume %9.1f ms%n", (time2 - time1) / 1.0e+6);
ps.format("Time for all analysis %9.1f ms%n", (time2 - time0) / 1.0e+6);
Expand Down Expand Up @@ -678,35 +691,82 @@ private double getPerimeterSum(List<PolygonConstraint> constraints) {
return perimeterSum.getSum();
}


/**
* Estimates vertex spacing for TIN based exclusively on interior edges.
* Constraint edges and perimeter edges are not included.
*
* @param tin a valid TIN.
* @param lakeData a valid instance of input data
* @return an estimated vertex spacing
* Finds the spacing between samples in the source data, selecting
* neighbors based on the Delaunay triangulation. This method
* applies logic to exclude constraint edges and edges that
* attach samples to constraint vertices. The rationale
* for this exclusion is that the constraints have a different
* level of data collection than the samples and would have
* different statistics. The logic below also excludes
* perimeter edges
* <p>
* This logic assumes that the TIN was prepared using the SVM conventions
* of populating the auxiliary index of the sample vertices with
* a flag indicating that they are depth samples.
* @param tin a valid instance
* @return a valid instance
*/
private double estimateInteriorVertexSpacing(IIncrementalTin tin, LakeData lakeData) {
KahanSummation sumLength = new KahanSummation();
int n = 0;
for (IQuadEdge e : tin.edges()) {
if (lakeData.isWater(e)) {
// the edge lies in a water area, but we also need
private SampleSpacing evaluateSampleSpacing(IIncrementalTin tin){

List<IConstraint> constraintsFromTin = tin.getConstraints();
boolean[] water = new boolean[constraintsFromTin.size()];
for (IConstraint con : constraintsFromTin) {
water[con.getConstraintIndex()] = (Boolean) con.getApplicationData();
}
KahanSummation sumLen = new KahanSummation();
KahanSummation sumLen2 = new KahanSummation();
int nEdgeMax = tin.getMaximumEdgeAllocationIndex();
float[] lenArray = new float[nEdgeMax];
int nLen = 0;
double lenMin = Double.POSITIVE_INFINITY;
double lenMax = Double.NEGATIVE_INFINITY;


for (IQuadEdge edge : tin.edges()) {
if (!edge.isConstrainedRegionInterior()) {
continue;
}

int conIndex = edge.getConstraintIndex();
if (water[conIndex]) {
// the edge lies in a water area, but we also need
// to exclude any edges that connect a sounding to
// a constraint border.
Vertex a = e.getA();
Vertex a = edge.getA();
Vertex b = edge.getB();
int aAux = a.getAuxiliaryIndex();
int bAux = a.getAuxiliaryIndex();
if (aAux == SvmBathymetryData.BATHYMETRY_SOURCE && bAux == SvmBathymetryData.BATHYMETRY_SOURCE) {
n++;
sumLength.add(e.getLength());
int bAux = b.getAuxiliaryIndex();
if (aAux == SvmBathymetryData.BATHYMETRY_SOURCE
&& bAux == SvmBathymetryData.BATHYMETRY_SOURCE) {
double len = edge.getLength();
sumLen.add(len);
sumLen2.add(len * len);
lenArray[nLen++] = (float) len;
if(len<lenMin){
lenMin = len;
}
if(len>lenMax){
lenMax = len;
}
}
}
}
if (n == 0) {
return 0;

Arrays.sort(lenArray, 0, nLen);
double sLen = sumLen.getSum();
double sLen2 = sumLen2.getSum();

double sigma = Double.NaN;
double mean = sumLen.getMean();
double median = lenArray[nLen / 2];
if (nLen > 2) {
sigma = Math.sqrt((sLen2 - (sLen / nLen) * sLen) / (nLen - 1));
}
return sumLength.getSum() / n;
}


return new SampleSpacing(nLen, mean, sigma, median, lenMin, lenMax);

}
}
Loading

0 comments on commit 4949221

Please sign in to comment.