diff --git a/Numerics/Data/Interpolation/Bilinear.cs b/Numerics/Data/Interpolation/Bilinear.cs
index ae520b23..9d7e0b18 100644
--- a/Numerics/Data/Interpolation/Bilinear.cs
+++ b/Numerics/Data/Interpolation/Bilinear.cs
@@ -28,6 +28,7 @@
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+
using Numerics.Distributions;
using System;
using System.Linq;
@@ -56,6 +57,7 @@ namespace Numerics.Data
///
///
///
+
[Serializable]
public class Bilinear
{
diff --git a/Numerics/Data/Paired Data/ProbabilityOrdinate.cs b/Numerics/Data/Paired Data/ProbabilityOrdinate.cs
index 38699b0a..fc0e59ef 100644
--- a/Numerics/Data/Paired Data/ProbabilityOrdinate.cs
+++ b/Numerics/Data/Paired Data/ProbabilityOrdinate.cs
@@ -78,7 +78,7 @@ public class ProbabilityOrdinates : List<ProbabilityOrdinate>, INotifyCollectionChanged, INotifyPropertyChanged
public event NotifyCollectionChangedEventHandler CollectionChanged;
/// <summary>
- /// Occurs when a property value changes, such as
+ /// Occurs when a property value changes/>
/// or the indexer Item[].
/// </summary>
public event PropertyChangedEventHandler PropertyChanged;
diff --git a/Numerics/Numerics.csproj b/Numerics/Numerics.csproj
index 2b99faf7..4c2823b6 100644
--- a/Numerics/Numerics.csproj
+++ b/Numerics/Numerics.csproj
@@ -34,8 +34,12 @@
2.0.0.0
-
-
+
+ enable
+
+
+
+
all
diff --git a/Test_Numerics/Data/Paired Data/Test_PairedData.cs b/Test_Numerics/Data/Paired Data/Test_PairedData.cs
index 802654d8..ec3d0f94 100644
--- a/Test_Numerics/Data/Paired Data/Test_PairedData.cs
+++ b/Test_Numerics/Data/Paired Data/Test_PairedData.cs
@@ -64,9 +64,9 @@ public Test_PairedData()
double[] xVals = new double[] { 230408, 288010, 345611, 403213, 460815, 518417, 576019, 633612, 691223, 748825, 806427, 864029, 921631, 1036834, 1152038 };
double[] yVals = new double[] { 1519.7, 1520.5, 1520.9, 1521.7, 1523.5, 1525.9, 1528.4, 1530.9, 1533.2, 1534.7, 1535.9, 1538, 1541.3, 1547.7, 1552.7 };
_dataset1 = new OrderedPairedData(xVals, yVals, true, SortOrder.Ascending, true, SortOrder.Ascending);
- _dataset2 = new OrderedPairedData(xVals.Reverse().ToArray(), yVals, true, SortOrder.Descending, true, SortOrder.Ascending);
- _dataset3 = new OrderedPairedData(xVals, yVals.Reverse().ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
- _dataset4 = new OrderedPairedData(xVals.Reverse().ToArray(), yVals.Reverse().ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
+ _dataset2 = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), yVals, true, SortOrder.Descending, true, SortOrder.Ascending);
+ _dataset3 = new OrderedPairedData(xVals, Enumerable.Reverse(yVals).ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
+ _dataset4 = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), Enumerable.Reverse(yVals).ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
}
///
@@ -184,12 +184,12 @@ public void Test_Indexing()
dataset11.Remove(ordinate);
bool test4 = dataset11.Contains(ordinate);
- Assert.AreEqual(false, test4);
+ Assert.IsFalse(test4);
Ordinate newOrdinate = dataset11[4];
dataset11.RemoveAt(4);
bool test5 = dataset11.Contains(newOrdinate);
- Assert.AreEqual(false, test5);
+ Assert.IsFalse(test5);
Ordinate newOrdinate2 = new Ordinate(1243177, 1563.8);
dataset11.Add(newOrdinate2);
diff --git a/Test_Numerics/Data/Paired Data/Test_UncertainOrdinate.cs b/Test_Numerics/Data/Paired Data/Test_UncertainOrdinate.cs
index 92320f98..b9062e51 100644
--- a/Test_Numerics/Data/Paired Data/Test_UncertainOrdinate.cs
+++ b/Test_Numerics/Data/Paired Data/Test_UncertainOrdinate.cs
@@ -85,14 +85,14 @@ public void Test_Construction()
// Also testing overloaded equality operators
Assert.IsTrue(unordinate1 == unordinate3);
Assert.IsTrue(unordinate1 == unordinate2);
- Assert.AreEqual(unordinate1.X, 2);
- Assert.AreEqual(unordinate1.Y, distribution);
- Assert.AreEqual(unordinate3.X, 2);
+ Assert.AreEqual(2, unordinate1.X);
+ Assert.AreEqual(distribution, unordinate1.Y);
+ Assert.AreEqual(2, unordinate3.X);
Assert.IsTrue(unordinate3.Y == distribution);
- Assert.AreEqual(unordinate1.IsValid, true);
- Assert.AreEqual(unordinate4.IsValid, false);
- Assert.AreEqual(unordinate5.IsValid, false);
+ Assert.IsTrue(unordinate1.IsValid);
+ Assert.IsFalse(unordinate4.IsValid);
+ Assert.IsFalse(unordinate5.IsValid);
Assert.IsTrue(unordinate1 != unordinate4);
Assert.IsTrue(unordinate1 != unordinate5);
diff --git a/Test_Numerics/Data/Paired Data/Test_UncertainPairedData.cs b/Test_Numerics/Data/Paired Data/Test_UncertainPairedData.cs
index a7bd4df7..6a092904 100644
--- a/Test_Numerics/Data/Paired Data/Test_UncertainPairedData.cs
+++ b/Test_Numerics/Data/Paired Data/Test_UncertainPairedData.cs
@@ -68,9 +68,9 @@ public Test_UncertainPairedData()
UnivariateDistributionBase[] yVals = new UnivariateDistributionBase[] { new Triangular(1, 2, 3), new Triangular(2, 4, 5), new Triangular(6, 8, 12), new Triangular(13, 19, 20) };
_dataset1 = new UncertainOrderedPairedData(xVals, yVals, true, SortOrder.Ascending, true, SortOrder.Ascending, UnivariateDistributionType.Triangular);
- _dataset2 = new UncertainOrderedPairedData(xVals.Reverse().ToArray(), yVals, true, SortOrder.Descending, true, SortOrder.Ascending, UnivariateDistributionType.Triangular);
- _dataset3 = new UncertainOrderedPairedData(xVals, yVals.Reverse().ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending, UnivariateDistributionType.Triangular);
- _dataset4 = new UncertainOrderedPairedData(xVals.Reverse().ToArray(), yVals.Reverse().ToArray(), true, SortOrder.Descending, true, SortOrder.Descending, UnivariateDistributionType.Triangular);
+ _dataset2 = new UncertainOrderedPairedData(Enumerable.Reverse(xVals).ToArray(), yVals, true, SortOrder.Descending, true, SortOrder.Ascending, UnivariateDistributionType.Triangular);
+ _dataset3 = new UncertainOrderedPairedData(xVals, Enumerable.Reverse(yVals).ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending, UnivariateDistributionType.Triangular);
+ _dataset4 = new UncertainOrderedPairedData(Enumerable.Reverse(xVals).ToArray(), Enumerable.Reverse(yVals).ToArray(), true, SortOrder.Descending, true, SortOrder.Descending, UnivariateDistributionType.Triangular);
}
///
@@ -112,9 +112,9 @@ public void Test_CurveSample()
double[] yMeanVals = new double[] { 2, 3.66667, 8.66667, 17.33333 };
var data1Expected = new OrderedPairedData(xVals, yMeanVals, true, SortOrder.Ascending, true, SortOrder.Ascending);
- var data2Expected = new OrderedPairedData(xVals.Reverse().ToArray(), yMeanVals, true, SortOrder.Descending, true, SortOrder.Ascending);
- var data3Expected = new OrderedPairedData(xVals, yMeanVals.Reverse().ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
- var data4Expected = new OrderedPairedData(xVals.Reverse().ToArray(), yMeanVals.Reverse().ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
+ var data2Expected = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), yMeanVals, true, SortOrder.Descending, true, SortOrder.Ascending);
+ var data3Expected = new OrderedPairedData(xVals, Enumerable.Reverse(yMeanVals).ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
+ var data4Expected = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), Enumerable.Reverse(yMeanVals).ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
for (int i = 0; i < data1.Count; i++)
{
@@ -153,9 +153,9 @@ public void Test_Curve_Sample_Probability()
double[] yInverseVals = new double[] { 2, 3.732051, 8.535898, 17.58258 };
var data1Expected = new OrderedPairedData(xVals, yInverseVals, true, SortOrder.Ascending, true, SortOrder.Ascending);
- var data2Expected = new OrderedPairedData(xVals.Reverse().ToArray(), yInverseVals, true, SortOrder.Descending, true, SortOrder.Ascending);
- var data3Expected = new OrderedPairedData(xVals, yInverseVals.Reverse().ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
- var data4Expected = new OrderedPairedData(xVals.Reverse().ToArray(), yInverseVals.Reverse().ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
+ var data2Expected = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), yInverseVals, true, SortOrder.Descending, true, SortOrder.Ascending);
+ var data3Expected = new OrderedPairedData(xVals, Enumerable.Reverse(yInverseVals).ToArray(), true, SortOrder.Ascending, true, SortOrder.Descending);
+ var data4Expected = new OrderedPairedData(Enumerable.Reverse(xVals).ToArray(), Enumerable.Reverse(yInverseVals).ToArray(), true, SortOrder.Descending, true, SortOrder.Descending);
for (int i = 0; i < data1.Count; i++)
{
@@ -189,12 +189,12 @@ public void Test_IList()
// Test Remove and Contains
pairedData.Remove(ordinate);
bool test2 = pairedData.Contains(ordinate);
- Assert.AreEqual(false, test2);
+ Assert.IsFalse(test2);
// Test RemoveAt and Contains
pairedData.RemoveAt(2);
bool test3 = pairedData.Contains(ordinate);
- Assert.AreEqual(false, test3);
+ Assert.IsFalse(test3);
// Test Insert and IndexOf
pairedData.Insert(2, ordinate);
diff --git a/Test_Numerics/Data/Statistics/Test_GoodnessOfFit.cs b/Test_Numerics/Data/Statistics/Test_GoodnessOfFit.cs
index 5add8fa8..5713518e 100644
--- a/Test_Numerics/Data/Statistics/Test_GoodnessOfFit.cs
+++ b/Test_Numerics/Data/Statistics/Test_GoodnessOfFit.cs
@@ -826,11 +826,11 @@ public void Test_MetricsConsistency_GoodModel()
double RSR = GoodnessOfFit.RSR(observed, modeled);
// Good model expectations
- Assert.IsTrue(NSE > 0.9, "NSE should be > 0.9 for a good model");
- Assert.IsTrue(KGE > 0.9, "KGE should be > 0.9 for a good model");
- Assert.IsTrue(RMSE < 5.0, "RMSE should be low for a good model");
- Assert.IsTrue(Math.Abs(PBIAS) < 5.0, "PBIAS should be low for a good model");
- Assert.IsTrue(RSR < 0.5, "RSR should be < 0.5 for a good model");
+ Assert.IsGreaterThan(0.9, NSE);
+ Assert.IsGreaterThan(0.9, KGE);
+ Assert.IsLessThan(5.0, RMSE);
+ Assert.IsLessThan(5.0, Math.Abs(PBIAS));
+ Assert.IsLessThan(0.5, RSR);
}
///
@@ -854,12 +854,12 @@ public void Test_MetricsConsistency_PoorModel()
// - RMSE equals the standard deviation of observations
// - RSR should be exactly 1.0 (RMSE / StdDev)
- Assert.IsTrue(NSE <= 0.05, $"NSE should be near 0 for constant-at-mean prediction, got {NSE}");
+ Assert.IsLessThanOrEqualTo(0.05, NSE);
// KGE returns -10.0 for zero-variance predictions (degenerate case)
- Assert.IsTrue(KGE < -5.0, $"KGE should be very poor for constant predictions (zero variance), got {KGE}");
+ Assert.IsLessThan(-5.0, KGE);
- Assert.IsTrue(RMSE > 15.0, "RMSE should be high for a poor model");
+ Assert.IsGreaterThan(15.0, RMSE);
Assert.IsTrue(RSR >= 0.95 && RSR <= 1.05, $"RSR should be approximately 1.0 for constant-at-mean prediction, got {RSR}");
}
diff --git a/Test_Numerics/Data/Time Series/Test_TimeSeriesDownload.cs b/Test_Numerics/Data/Time Series/Test_TimeSeriesDownload.cs
index 79848889..bf4444b8 100644
--- a/Test_Numerics/Data/Time Series/Test_TimeSeriesDownload.cs
+++ b/Test_Numerics/Data/Time Series/Test_TimeSeriesDownload.cs
@@ -146,7 +146,7 @@ public class Test_TimeSeriesDownload
private static void AssertDailySeriesMonotonic(TimeSeries ts)
{
Assert.IsNotNull(ts, "Time series is null.");
- Assert.IsTrue(ts.Count > 0, "Time series is empty.");
+ Assert.IsGreaterThan(0, ts.Count);
DateTime? prev = null;
foreach (var pt in ts)
@@ -175,9 +175,9 @@ private static void AssertRoughlyEqual(double a, double b, double relTol = 1e-6,
double denom = Math.Max(Math.Abs(a), Math.Abs(b));
if (denom == 0)
- Assert.IsTrue(diff <= absTol);
+ Assert.IsLessThanOrEqualTo(absTol, diff);
else
- Assert.IsTrue(diff / denom <= relTol, $"Values differ: {a} vs {b}");
+ Assert.IsLessThanOrEqualTo(relTol, diff / denom);
}
#endregion
@@ -572,7 +572,7 @@ public async Task BOM_WindowedDownload_Works()
startDate: WinStart, endDate: WinEnd);
Assert.IsNotNull(ts, "Time series is null.");
- Assert.IsTrue(ts.Count > 0, "Time series is empty.");
+ Assert.IsGreaterThan(0, ts.Count);
// Verify data is within requested window (allowing for some timezone flexibility)
var firstDate = ts.First().Index;
diff --git a/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs b/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs
index 4b51d143..d7e508a8 100644
--- a/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs
@@ -259,7 +259,7 @@ public void Test_ConvolveFiveDistributions()
var convolved = EmpiricalDistribution.Convolve(distributions, 1024);
// Assert number of points
- Assert.AreEqual(1024, convolved.XValues.Count, "Should have exactly 1024 points");
+ Assert.HasCount(1024, convolved.XValues);
// Expected: Range ≈ [0, 10], Mean ≈ 5
Assert.AreEqual(0.0, convolved.Minimum, 0.2, "Minimum should be approximately 0");
diff --git a/Test_Numerics/Mathematics/Optimization/Dynamic/BinaryHeapTesting.cs b/Test_Numerics/Mathematics/Optimization/Dynamic/BinaryHeapTesting.cs
index 5b24a99b..d2230212 100644
--- a/Test_Numerics/Mathematics/Optimization/Dynamic/BinaryHeapTesting.cs
+++ b/Test_Numerics/Mathematics/Optimization/Dynamic/BinaryHeapTesting.cs
@@ -143,7 +143,7 @@ public void HeapTest4()
//Compare
for (int i = 0; i < weights.Length; i++)
{
- Assert.AreEqual(heap.RemoveMin().Value == weights[i], false);
+ Assert.AreNotEqual(heap.RemoveMin().Value, weights[i]);
}
}
diff --git a/Test_Numerics/Mathematics/Optimization/Dynamic/DijkstraTesting.cs b/Test_Numerics/Mathematics/Optimization/Dynamic/DijkstraTesting.cs
index f82cb685..8b805ca4 100644
--- a/Test_Numerics/Mathematics/Optimization/Dynamic/DijkstraTesting.cs
+++ b/Test_Numerics/Mathematics/Optimization/Dynamic/DijkstraTesting.cs
@@ -52,11 +52,11 @@ public void SimpleEdgeGraphCost()
float[,] result = Dijkstra.Solve(edges, 3,6);
- Assert.AreEqual(result[3, 2], 0f);
- Assert.AreEqual(result[2, 2], 3f);
- Assert.AreEqual(result[1, 2], 4f);
- Assert.AreEqual(result[0, 2], 6f);
- Assert.AreEqual(result[4, 2], 7f);
+ Assert.AreEqual(0f, result[3, 2]);
+ Assert.AreEqual(3f, result[2, 2]);
+ Assert.AreEqual(4f, result[1, 2]);
+ Assert.AreEqual(6f, result[0, 2]);
+ Assert.AreEqual(7f, result[4, 2]);
Assert.IsTrue(float.IsPositiveInfinity(result[5,2]));
}
@@ -117,11 +117,11 @@ public void SimpleNetworkRouting()
float[,] result = Dijkstra.Solve(edges,9);
- Assert.AreEqual(result[0, 0], 5f); //Algorithm is choosing the next node that yields the shortest paths
- Assert.AreEqual(result[0, 2], 8f);
+ Assert.AreEqual(5f, result[0, 0]); //Algorithm is choosing the next node that yields the shortest paths
+ Assert.AreEqual(8f, result[0, 2]);
- Assert.AreEqual(result[1, 0], 7);
- Assert.AreEqual(result[1, 2], 5);
+ Assert.AreEqual(7, result[1, 0]);
+ Assert.AreEqual(5, result[1, 2]);
Assert.AreEqual(1, result[2, 0]);
Assert.AreEqual(6, result[2, 2]);
@@ -241,10 +241,10 @@ public void MultipleDestSharedPath()
var result = Dijkstra.Solve(edges, [0,3],4);
- Assert.AreEqual(result[1, 0], 0);
- Assert.AreEqual(result[1, 2], 3);
- Assert.AreEqual(result[2, 0], 1);
- Assert.AreEqual(result[2, 2], 5);
+ Assert.AreEqual(0, result[1, 0]);
+ Assert.AreEqual(3, result[1, 2]);
+ Assert.AreEqual(1, result[2, 0]);
+ Assert.AreEqual(5, result[2, 2]);
}
///
@@ -262,10 +262,10 @@ public void DisconnectedComponent()
new Edge(2,3,1,2)
};
var result = Dijkstra.Solve(edges, [0, 3], 4);
- Assert.AreEqual(result[1,0],0);
- Assert.AreEqual(result[1, 2], 3);
- Assert.AreEqual(result[2, 0], 3);
- Assert.AreEqual(result[2, 2], 1);
+ Assert.AreEqual(0, result[1, 0]);
+ Assert.AreEqual(3, result[1, 2]);
+ Assert.AreEqual(3, result[2, 0]);
+ Assert.AreEqual(1, result[2, 2]);
}
///
@@ -304,11 +304,11 @@ public void TrianglePath()
};
var result = Dijkstra.Solve(edges, [0, 2], 3);
- Assert.AreEqual(result[0, 0], 0);
- Assert.AreEqual(result[0, 2], 0);
- Assert.AreEqual(result[1, 0], 2);
- Assert.AreEqual(result[1, 2], 1);
- Assert.AreEqual(result[2, 0], 2);
+ Assert.AreEqual(0, result[0, 0]);
+ Assert.AreEqual(0, result[0, 2]);
+ Assert.AreEqual(2, result[1, 0]);
+ Assert.AreEqual(1, result[1, 2]);
+ Assert.AreEqual(2, result[2, 0]);
}
}
}
diff --git a/Test_Numerics/Test_Numerics.csproj b/Test_Numerics/Test_Numerics.csproj
index f96aca23..d25b4db4 100644
--- a/Test_Numerics/Test_Numerics.csproj
+++ b/Test_Numerics/Test_Numerics.csproj
@@ -20,6 +20,8 @@
+
+