diff --git a/Numerics/Data/Interpolation/CubicSpline.cs b/Numerics/Data/Interpolation/CubicSpline.cs
index 804feb3a..6a32a3b1 100644
--- a/Numerics/Data/Interpolation/CubicSpline.cs
+++ b/Numerics/Data/Interpolation/CubicSpline.cs
@@ -28,9 +28,6 @@
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-using System;
-using System.Collections.Generic;
-
 namespace Numerics.Data
 {
     /// <summary>
@@ -78,7 +75,7 @@ public CubicSpline(IList<double> xValues, IList<double> yValues, SortOrder sortO
         /// <summary>
         /// Stores the array of second derivatives.
         /// </summary>
-        private double[] y2;
+        private double[] y2 = Array.Empty<double>();
 
         /// <summary>
         /// Auxiliary routine to set the second derivatives. If you make changes to the x- or y-values, then you need to call this routine afterwards.
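Note: `y2` caches the natural-spline second derivatives that the auxiliary routine recomputes. For readers unfamiliar with that step, the sketch below is the standard natural cubic spline recurrence (tridiagonal forward sweep plus back-substitution); the class's own routine may differ in boundary handling, so treat it as illustrative only.

    // Sketch: second derivatives for a natural cubic spline (y2[0] = y2[n-1] = 0).
    static double[] SecondDerivatives(double[] x, double[] y)
    {
        int n = x.Length;
        var y2 = new double[n];
        var u = new double[n];
        for (int i = 1; i < n - 1; i++)
        {
            double sig = (x[i] - x[i - 1]) / (x[i + 1] - x[i - 1]);
            double p = sig * y2[i - 1] + 2.0;
            y2[i] = (sig - 1.0) / p;
            double du = (y[i + 1] - y[i]) / (x[i + 1] - x[i]) - (y[i] - y[i - 1]) / (x[i] - x[i - 1]);
            u[i] = (6.0 * du / (x[i + 1] - x[i - 1]) - sig * u[i - 1]) / p;
        }
        for (int k = n - 2; k >= 0; k--)
            y2[k] = y2[k] * y2[k + 1] + u[k]; // back-substitution
        return y2;
    }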
diff --git a/Numerics/Data/Paired Data/OrderedPairedData.cs b/Numerics/Data/Paired Data/OrderedPairedData.cs
index d269941f..8632cda5 100644
--- a/Numerics/Data/Paired Data/OrderedPairedData.cs
+++ b/Numerics/Data/Paired Data/OrderedPairedData.cs
@@ -35,6 +35,7 @@
 using System.Data;
 using System.Linq;
 using System.Xml.Linq;
+using System.Xml.Serialization;
 using Numerics.Distributions;
 
 namespace Numerics.Data
@@ -102,7 +103,7 @@ public class OrderedPairedData : IList<Ordinate>, INotifyCollectionChanged
         private bool _strictY;
         private SortOrder _orderX;
         private SortOrder _orderY;
-        private List<Ordinate> _ordinates;
+        private readonly List<Ordinate> _ordinates;
 
         /// <inheritdoc/>
         public event NotifyCollectionChangedEventHandler? CollectionChanged;
@@ -265,20 +266,24 @@ public OrderedPairedData(XElement el)
         {
             // Get Strictness
             bool strict = false;
-            if (el.Attribute(nameof(StrictX)) != null) { bool.TryParse(el.Attribute(nameof(StrictX)).Value, out strict); }
+            var strictXAttr = el.Attribute(nameof(StrictX));
+            if (strictXAttr != null) { bool.TryParse(strictXAttr.Value, out strict); }
             StrictX = strict;
             strict = false;
-            if (el.Attribute(nameof(StrictY)) != null) { bool.TryParse(el.Attribute(nameof(StrictY)).Value, out strict); }
+            var strictYAttr = el.Attribute(nameof(StrictY));
+            if (strictYAttr != null) { bool.TryParse(strictYAttr.Value, out strict); }
             StrictY = strict;
 
             // Get Order
             SortOrder order = SortOrder.None;
-            if (el.Attribute(nameof(OrderX)) != null) { Enum.TryParse(el.Attribute(nameof(OrderX)).Value, out order); }
+            var orderXAttr = el.Attribute(nameof(OrderX));
+            if (orderXAttr != null) { Enum.TryParse(orderXAttr.Value, out order); }
             OrderX = order;
             order = SortOrder.None;
-            if (el.Attribute(nameof(OrderY)) != null) { Enum.TryParse(el.Attribute(nameof(OrderY)).Value, out order); }
+            var orderYAttr = el.Attribute(nameof(OrderY));
+            if (orderYAttr != null) { Enum.TryParse(orderYAttr.Value, out order); }
             OrderY = order;
 
             // Ordinates
@@ -1432,15 +1437,14 @@ private double TriangleArea(Ordinate point1, Ordinate point2, Ordinate point3)
         /// and number of points in the search region.</param>
         public OrderedPairedData LangSimplify(double tolerance, int lookAhead)
         {
-            if (_ordinates == null | lookAhead <= 1 | tolerance <= 0)
-                return this;
+            if (lookAhead <= 1 | tolerance <= 0) { return this; }
 
             List<Ordinate> ordinates = new List<Ordinate>();
             int count = _ordinates.Count;
             int offset;
 
-            if (lookAhead > count - 1)
-                lookAhead = count - 1;
+            if (lookAhead > count - 1) { lookAhead = count - 1; }
+
             ordinates.Add(_ordinates[0]);
 
             for (int i = 0; i < count; i++)

diff --git a/Numerics/Data/Paired Data/Ordinate.cs b/Numerics/Data/Paired Data/Ordinate.cs
index 8a7530cb..153787ae 100644
--- a/Numerics/Data/Paired Data/Ordinate.cs
+++ b/Numerics/Data/Paired Data/Ordinate.cs
@@ -79,9 +79,10 @@ public Ordinate(double xValue, double yValue)
         public Ordinate(XElement xElement)
         {
             double x = 0, y = 0;
-
-            if (xElement.Attribute(nameof(X)) != null) double.TryParse(xElement.Attribute(nameof(X))?.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x);
-            if (xElement.Attribute(nameof(Y)) != null) double.TryParse(xElement.Attribute(nameof(Y))?.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out y);
+            var xAttribute = xElement.Attribute(nameof(X));
+            var yAttribute = xElement.Attribute(nameof(Y));
+            if (xAttribute != null) double.TryParse(xAttribute.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x);
+            if (yAttribute != null) double.TryParse(yAttribute.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out y);
             X = x;
             Y = y;
             IsValid = true;
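Note: the null-check-then-TryParse pattern above repeats across these XElement constructors. A small extension helper could centralize it; the helper names below are hypothetical and not part of Numerics.

    using System.Globalization;
    using System.Xml.Linq;

    internal static class XmlParseExtensions
    {
        // Hypothetical helper: returns the parsed attribute value, or a fallback.
        public static double GetDouble(this XElement el, string name, double fallback = 0.0)
        {
            var attr = el.Attribute(name);
            return attr != null && double.TryParse(attr.Value, NumberStyles.Any,
                CultureInfo.InvariantCulture, out var v) ? v : fallback;
        }

        public static bool GetBool(this XElement el, string name, bool fallback = false)
        {
            var attr = el.Attribute(name);
            return attr != null && bool.TryParse(attr.Value, out var v) ? v : fallback;
        }
    }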
diff --git a/Numerics/Data/Paired Data/UncertainOrderedPairedData.cs b/Numerics/Data/Paired Data/UncertainOrderedPairedData.cs
index ed442c8d..4d9ab6a2 100644
--- a/Numerics/Data/Paired Data/UncertainOrderedPairedData.cs
+++ b/Numerics/Data/Paired Data/UncertainOrderedPairedData.cs
@@ -32,6 +32,7 @@
 using System.Collections;
 using System.Collections.Generic;
 using System.Collections.Specialized;
+using System.Data;
 using System.Globalization;
 using System.Linq;
 using System.Xml.Linq;
@@ -171,7 +172,7 @@ public SortOrder OrderY
         /// <summary>
         /// Handles the event of CollectionChanged
         /// </summary>
-        public event NotifyCollectionChangedEventHandler CollectionChanged;
+        public event NotifyCollectionChangedEventHandler? CollectionChanged;
 
         #endregion
 
@@ -243,7 +244,12 @@ public UncertainOrderedPairedData(IList<UncertainOrdinate> data, bool strictOnX,
             _orderY = yOrder;
             _uncertainOrdinates = new List<UncertainOrdinate>(data.Count);
             for (int i = 0; i < data.Count; i++)
-                _uncertainOrdinates.Add(new UncertainOrdinate(data[i].X, data[i].Y.Clone()));
+            {
+                var o = data[i];
+                UnivariateDistributionBase? yValue = o.Y?.Clone();
+                if (yValue is not null) { _uncertainOrdinates.Add(new UncertainOrdinate(o.X, yValue)); }
+            }
+
             Validate();
         }
@@ -266,8 +272,12 @@ private UncertainOrderedPairedData(IList<UncertainOrdinate> data, bool strictOnX
             _orderY = yOrder;
             _uncertainOrdinates = new List<UncertainOrdinate>(data.Count);
             for (int i = 0; i < data.Count; i++)
-                _uncertainOrdinates.Add(new UncertainOrdinate(data[i].X, data[i].Y.Clone()));
-
+            {
+                var o = data[i];
+                UnivariateDistributionBase? yValue = o.Y?.Clone();
+                if (yValue is not null) { _uncertainOrdinates.Add(new UncertainOrdinate(o.X, yValue)); }
+            }
+
             _isValid = dataValid;
         }
@@ -277,34 +287,44 @@ private UncertainOrderedPairedData(IList<UncertainOrdinate> data, bool strictOnX
         /// <param name="el">The XElement the UncertainOrderPairedData object is being created from.</param>
         public UncertainOrderedPairedData(XElement el)
         {
+            var strictX = el.Attribute("X_Strict");
             // Get Strictness
-            if (el.Attribute("X_Strict") != null)
-                bool.TryParse(el.Attribute("X_Strict").Value, out _strictX);
-            if (el.Attribute("Y_Strict") != null)
-                bool.TryParse(el.Attribute("Y_Strict").Value, out _strictY);
+            if (strictX != null) { bool.TryParse(strictX.Value, out _strictX); }
+
+            var strictY = el.Attribute("Y_Strict");
+            if (strictY != null) { bool.TryParse(strictY.Value, out _strictY); }
+
             // Get Order
-            if (el.Attribute("X_Order") != null)
-                Enum.TryParse(el.Attribute("X_Order").Value, out _orderX);
-            if (el.Attribute("Y_Order") != null)
-                Enum.TryParse(el.Attribute("Y_Order").Value, out _orderY);
+            var orderX = el.Attribute("X_Order");
+            if (orderX != null) { Enum.TryParse(orderX.Value, out _orderX); }
+
+            var orderY = el.Attribute("Y_Order");
+            if (orderY != null) { Enum.TryParse(orderY.Value, out _orderY); }
+
             // Distribution type
             Distribution = UnivariateDistributionType.Deterministic;
-            if (el.Attribute("Distribution") != null)
+            var distributionAttr = el.Attribute("Distribution");
+            if (distributionAttr != null)
             {
                 var argresult = Distribution;
-                Enum.TryParse(el.Attribute("Distribution").Value, out argresult);
+                Enum.TryParse(distributionAttr.Value, out argresult);
                 Distribution = argresult;
             }
             // new prop
-
-            if (el.Attribute(nameof(AllowDifferentDistributionTypes)) != null)
+            var allowDiffAtr = el.Attribute(nameof(AllowDifferentDistributionTypes));
+            if (allowDiffAtr != null)
             {
-                bool.TryParse(el.Attribute(nameof(AllowDifferentDistributionTypes)).Value, out _allowDifferentDistributionTypes);
+                bool.TryParse(allowDiffAtr.Value, out _allowDifferentDistributionTypes);
                 // Get Ordinates
                 var curveEl = el.Element("Ordinates");
                 _uncertainOrdinates = new List<UncertainOrdinate>();
-                foreach (XElement ord in curveEl.Elements(nameof(UncertainOrdinate)))
-                    _uncertainOrdinates.Add(new UncertainOrdinate(ord));
+
+                if (curveEl != null)
+                {
+                    foreach (XElement ord in curveEl.Elements(nameof(UncertainOrdinate)))
+                        _uncertainOrdinates.Add(new UncertainOrdinate(ord));
+                }
             }
             else
             {
@@ -315,15 +335,19 @@ public UncertainOrderedPairedData(XElement el)
             {
                 foreach (XElement o in curveEl.Elements("Ordinate"))
                 {
-                    double.TryParse(o.Attribute("X").Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var xout);
-                    xData.Add(xout);
+                    var xAttr = o.Attribute("X");
+                    if (xAttr != null && double.TryParse(xAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var xout)) { xData.Add(xout); }
+                    else { xData.Add(0.0); }
+
                     var dist = UnivariateDistributionFactory.CreateDistribution(Distribution);
                     var props = dist.GetParameterPropertyNames;
                     var paramVals = new double[(props.Count())];
+
                     for (int i = 0; i < props.Count(); i++)
                     {
-                        double.TryParse(o.Attribute(props[i]).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var result);
-                        paramVals[i] = result;
+                        var pAttr = o.Attribute(props[i]);
+                        if (pAttr != null && double.TryParse(pAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var result)) { paramVals[i] = result; }
+                        else { paramVals[i] = 0.0; }
                     }
                     dist.SetParameters(paramVals);
@@ -488,7 +512,14 @@ public List<string> GetErrors()
             {
                 if (left._uncertainOrdinates[i].X != right._uncertainOrdinates[i].X)
                     return false;
-                if (left._uncertainOrdinates[i].Y == right._uncertainOrdinates[i].Y == false)
+
+                var leftY = left._uncertainOrdinates[i].Y;
+                var rightY = right._uncertainOrdinates[i].Y;
+                if (leftY is null && rightY is null)
+                    continue;
+                if (leftY is null || rightY is null)
+                    return false;
+                if (!leftY.Equals(rightY))
                     return false;
             }
             return true;
@@ -510,7 +541,7 @@ public List<string> GetErrors()
         /// </summary>
         /// <param name="obj">The object to compare with the current object.</param>
         /// <returns>True if the specified object is equal to the current object; otherwise, False.</returns>
-        public override bool Equals(object obj)
+        public override bool Equals(object? obj)
         {
             if (obj is UncertainOrderedPairedData other)
             {
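Note: the copy constructors above now skip any ordinate whose Y distribution is null, so the copy can end up shorter than the source collection. If identical counts matter, a fail-fast variant of the same loop is an option (a sketch, not the library's behavior):

    for (int i = 0; i < data.Count; i++)
    {
        var o = data[i];
        // Throw instead of silently dropping the ordinate.
        var yValue = o.Y?.Clone()
            ?? throw new ArgumentException($"Ordinate {i} has no Y distribution.", nameof(data));
        _uncertainOrdinates.Add(new UncertainOrdinate(o.X, yValue));
    }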
diff --git a/Numerics/Data/Paired Data/UncertainOrdinate.cs b/Numerics/Data/Paired Data/UncertainOrdinate.cs
index 1395f697..d3a81f7a 100644
--- a/Numerics/Data/Paired Data/UncertainOrdinate.cs
+++ b/Numerics/Data/Paired Data/UncertainOrdinate.cs
@@ -68,9 +68,7 @@ public UncertainOrdinate(double xValue, UnivariateDistributionBase yValue)
         {
             X = xValue;
             Y = yValue;
-            IsValid = true;
-            if (double.IsInfinity(X) || double.IsNaN(X) || Y == null || Y.ParametersValid == false)
-                IsValid = false;
+            IsValid = !(double.IsInfinity(X) || double.IsNaN(X) || Y is null || !Y.ParametersValid);
         }
 
         /// <summary>
@@ -79,16 +77,18 @@ public UncertainOrdinate(double xValue, UnivariateDistributionBase yValue)
         /// <param name="xElement">The XElement to deserialize.</param>
         public UncertainOrdinate(XElement xElement)
         {
+            var xAttr = xElement.Attribute(nameof(X));
             double x = 0;
-            UnivariateDistributionBase dist = null;
-            if (xElement.Attribute(nameof(X)) != null) double.TryParse(xElement.Attribute(nameof(X)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x);
-            if (xElement.Element("Distribution") != null) { dist = UnivariateDistributionFactory.CreateDistribution(xElement.Element("Distribution")); }
+            UnivariateDistributionBase? dist = null;
+            if (xAttr != null) { double.TryParse(xAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x); }
+
+            var distEl = xElement.Element("Distribution");
+            if (distEl != null) { dist = UnivariateDistributionFactory.CreateDistribution(distEl); }
             //
             X = x;
             Y = dist;
-            IsValid = true;
-            if (double.IsInfinity(X) || double.IsNaN(X) || Y == null || Y.ParametersValid == false)
-                IsValid = false;
+
+            IsValid = !(double.IsInfinity(X) || double.IsNaN(X) || Y is null || !Y.ParametersValid);
         }
 
         /// <summary>
@@ -99,7 +99,8 @@ public UncertainOrdinate(XElement xElement)
         public UncertainOrdinate(XElement xElement, UnivariateDistributionType distributionType)
         {
             double x = 0;
-            if (xElement.Attribute(nameof(X)) != null) double.TryParse(xElement.Attribute(nameof(X)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x);
+            var xElAttr = xElement.Attribute(nameof(X));
+            if (xElAttr != null) double.TryParse(xElAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out x);
             // backwards compatibility
             var dist = UnivariateDistributionFactory.CreateDistribution(distributionType);
             var props = dist.GetParameterPropertyNames;
@@ -107,7 +108,8 @@ public UncertainOrdinate(XElement xElement, UnivariateDistributionType distribut
             for (int i = 0; i < props.Count(); i++)
             {
                 double p = 0;
-                if (xElement.Attribute(props[i]) != null) double.TryParse(xElement.Attribute(props[i]).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out p);
+                var xElPropsAttr = xElement.Attribute(props[i]);
+                if (xElPropsAttr != null) { double.TryParse(xElPropsAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out p); }
                 paramVals[i] = p;
             }
             dist.SetParameters(paramVals);
@@ -115,7 +117,7 @@ public UncertainOrdinate(XElement xElement, UnivariateDistributionType distribut
             X = x;
             Y = dist;
             IsValid = true;
-            if (double.IsInfinity(X) || double.IsNaN(X) || Y == null || Y.ParametersValid == false)
+            if (double.IsInfinity(X) || double.IsNaN(X) || Y is null || Y.ParametersValid == false)
                 IsValid = false;
         }
 
@@ -132,7 +134,7 @@ public UncertainOrdinate(XElement xElement, UnivariateDistributionType distribut
         /// <summary>
         /// Y distribution.
         /// </summary>
-        public UnivariateDistributionBase Y;
+        public UnivariateDistributionBase? Y;
 
         /// <summary>
         /// Boolean indicating if the ordinate has valid numeric values or not.
@@ -151,6 +153,10 @@ public UncertainOrdinate(XElement xElement, UnivariateDistributionType distribut
         /// <returns>A 'sampled' ordinate value.</returns>
         public Ordinate GetOrdinate(double probability)
         {
+            if (Y is null)
+            {
+                throw new InvalidOperationException("Y distribution is not defined.");
+            }
             return new Ordinate(X, Y.InverseCDF(probability));
         }
 
@@ -160,6 +166,10 @@ public Ordinate GetOrdinate(double probability)
         /// <returns>A mean ordinate value.</returns>
         public Ordinate GetOrdinate()
         {
+            if (Y is null)
+            {
+                throw new InvalidOperationException("Y distribution is not defined.");
+            }
             return new Ordinate(X, Y.Mean);
         }
 
@@ -182,9 +192,11 @@ public bool OrdinateValid(UncertainOrdinate ordinateToCompare, bool strictX, boo
             if (ordinateToCompare.IsValid == false)
                 return false;
             // Check for equivalent distribution types
-            if (allowDifferentTypes == false && ordinateToCompare.Y.Type != Y.Type)
+            if (allowDifferentTypes == false && (ordinateToCompare.Y is null || Y is null || ordinateToCompare.Y.Type != Y.Type))
                 return false;
 
+            if (Y is null || ordinateToCompare.Y is null)
+                return false;
             double minPercentile = Y.Type == UnivariateDistributionType.PertPercentile || Y.Type == UnivariateDistributionType.PertPercentileZ ? 0.05 : 1E-5;
             // Test reasonable lower bound
@@ -223,7 +235,7 @@ public List<string> OrdinateErrors(UncertainOrdinate ordinateToCompare, bool str
                 result.Add("Ordinate X value can not be infinity.");
             if (double.IsNaN(ordinateToCompare.X))
                 result.Add("Ordinate X value must be a valid number.");
-            if (ordinateToCompare.Y == null)
+            if (ordinateToCompare.Y is null)
             {
                 result.Add("Ordinate Y value must be defined.");
             }
@@ -234,6 +246,12 @@ public List<string> OrdinateErrors(UncertainOrdinate ordinateToCompare, bool str
                 }
             }
             // Check for equivalent distribution types
+            if (ordinateToCompare.Y is null || Y is null)
+            {
+                result.Add("Ordinate Y value must be defined.");
+                return result;
+            }
+
             if (allowDifferentTypes == false && ordinateToCompare.Y.Type != Y.Type)
                 result.Add("Can't compare two ordinates with different distribution types."); // Return False
             //
@@ -266,7 +284,7 @@ public List<string> OrdinateErrors()
                 result.Add("Ordinate X value can not be infinity.");
             if (double.IsNaN(X))
                 result.Add("Ordinate X value must be a valid number.");
-            if (Y == null)
+            if (Y is null)
                 result.Add("Ordinate Y value must be defined.");
             else if (Y.ParametersValid == false)
             {
@@ -286,9 +304,13 @@ public List<string> OrdinateErrors()
         /// <returns>True if two objects are numerically equal; otherwise, False.</returns>
         public static bool operator ==(UncertainOrdinate left, UncertainOrdinate right)
         {
-            //if (left == null || right == null) return false;
+
             if (left.X != right.X)
                 return false;
+            if (left.Y is null && right.Y is null)
+                return true;
+            if (left.Y is null || right.Y is null)
+                return false;
             if (left.Y != right.Y)
                 return false;
             return true;
@@ -310,7 +332,7 @@ public List<string> OrdinateErrors()
         /// <param name="obj">The object to compare with the current object.</param>
         /// <returns>True if the specified object is equal to the current object; otherwise, False.</returns>
-        public override bool Equals(object obj)
+        public override bool Equals(object? obj)
         {
             if (obj is UncertainOrdinate other)
             {
@@ -329,7 +351,7 @@ public override int GetHashCode()
         {
             int hash = 17;
             hash = hash * 23 + X.GetHashCode();
-            hash = hash * 23 + Y.GetHashCode();
+            hash = hash * 23 + (Y is not null ? Y.GetHashCode() : 0);
             return hash;
         }
 
@@ -341,7 +363,10 @@ public XElement ToXElement()
         {
             var result = new XElement(nameof(UncertainOrdinate));
             result.SetAttributeValue(nameof(X), X.ToString("G17", CultureInfo.InvariantCulture));
-            result.Add(Y.ToXElement());
+            if (Y is not null)
+            {
+                result.Add(Y.ToXElement());
+            }
             return result;
         }
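Note: on runtimes that provide System.HashCode, the null-guarded hash above can be written more compactly, since HashCode.Combine hashes null members as 0 automatically (assumes .NET Core 2.1+ or .NET Standard 2.1):

    public override int GetHashCode() => HashCode.Combine(X, Y);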
diff --git a/Numerics/Data/Regression/LinearRegression.cs b/Numerics/Data/Regression/LinearRegression.cs
index d33a0442..50ec19cd 100644
--- a/Numerics/Data/Regression/LinearRegression.cs
+++ b/Numerics/Data/Regression/LinearRegression.cs
@@ -117,32 +117,32 @@ public LinearRegression(Matrix x, Vector y, bool hasIntercept = true)
         /// <summary>
         /// The list of estimated parameter values.
         /// </summary>
-        public List<double> Parameters { get; private set; }
+        public List<double> Parameters { get; private set; } = Array.Empty<double>().ToList();
 
         /// <summary>
         /// The list of the estimated parameter names.
         /// </summary>
-        public List<string> ParameterNames { get; private set; }
+        public List<string> ParameterNames { get; private set; } = Array.Empty<string>().ToList();
 
         /// <summary>
         /// The list of the estimated parameter standard errors.
         /// </summary>
-        public List<double> ParameterStandardErrors { get; private set; }
+        public List<double> ParameterStandardErrors { get; private set; } = Array.Empty<double>().ToList();
 
         /// <summary>
         /// The list of the estimated parameter t-statistics.
         /// </summary>
-        public List<double> ParameterTStats { get; private set; }
+        public List<double> ParameterTStats { get; private set; } = Array.Empty<double>().ToList();
 
         /// <summary>
         /// The estimated parameter covariance matrix.
         /// </summary>
-        public Matrix Covariance { get; private set; }
+        public Matrix Covariance { get; private set; } = new Matrix(0, 0);
 
         /// <summary>
         /// The residuals of the fitted linear model.
         /// </summary>
-        public double[] Residuals { get; private set; }
+        public double[] Residuals { get; private set; } = Array.Empty<double>();
 
         /// <summary>
         /// The model standard error.
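Note: a usage sketch of the fitted-model surface touched above. It assumes Matrix accepts a double[,] and Vector a double[] (both constructor shapes appear elsewhere in this changeset), and the sample values are made up.

    // Fit y = b0 + b1*x with an intercept, then inspect the estimates.
    var x = new Matrix(new double[,] { { 1.0 }, { 2.0 }, { 3.0 }, { 4.0 } });
    var y = new Vector(new[] { 2.1, 3.9, 6.2, 8.1 });
    var lm = new LinearRegression(x, y, hasIntercept: true);
    Console.WriteLine(string.Join(", ", lm.Parameters));      // fitted coefficients
    Console.WriteLine(string.Join(", ", lm.ParameterTStats)); // t-statistics
    double sse = lm.Residuals.Sum(r => r * r);                // requires System.Linq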
diff --git a/Numerics/Data/Statistics/Autocorrelation.cs b/Numerics/Data/Statistics/Autocorrelation.cs
index c2f08cd2..c08daec1 100644
--- a/Numerics/Data/Statistics/Autocorrelation.cs
+++ b/Numerics/Data/Statistics/Autocorrelation.cs
@@ -94,7 +94,7 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the function evaluated at the given values.
        /// </returns>
-        public static double[,] Function(IList<double> data, int lagMax = -1, Type type = Type.Correlation)
+        public static double[,]? Function(IList<double> data, int lagMax = -1, Type type = Type.Correlation)
         {
             if (type == Type.Correlation)
             {
@@ -123,7 +123,7 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the function evaluated at the given values.
        /// </returns>
-        public static double[,] Function(TimeSeries timeSeries, int lagMax = -1, Type type = Type.Correlation)
+        public static double[,]? Function(TimeSeries timeSeries, int lagMax = -1, Type type = Type.Correlation)
         {
             if (type == Type.Correlation)
             {
@@ -150,7 +150,7 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the covariance of the given values.
        /// </returns>
-        private static double[,] Covariance(IList<double> data, int lagMax = -1)
+        private static double[,]? Covariance(IList<double> data, int lagMax = -1)
         {
             int n = data.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
@@ -177,7 +177,7 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the covariance of the given values.
        /// </returns>
-        private static double[,] Covariance(TimeSeries timeSeries, int lagMax = -1)
+        private static double[,]? Covariance(TimeSeries timeSeries, int lagMax = -1)
         {
             int n = timeSeries.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
@@ -205,12 +205,15 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the autocorrelation of the given values.
        /// </returns>
-        private static double[,] Correlation(IList<double> data, int lagMax = -1)
+        private static double[,]? Correlation(IList<double> data, int lagMax = -1)
         {
             int n = data.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
             if (lagMax < 1 || n < 2) return null;
             var acf = Covariance(data, lagMax);
+
+            if (acf == null) return null;
+
             double den = acf[0, 1];
             for (int i = 0; i < acf.GetLength(0); i++)
                 acf[i, 1] /= den;
@@ -226,12 +229,15 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the autocorrelation of the given values.
        /// </returns>
-        private static double[,] Correlation(TimeSeries timeSeries, int lagMax = -1)
+        private static double[,]? Correlation(TimeSeries timeSeries, int lagMax = -1)
         {
             int n = timeSeries.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
             if (lagMax < 1 || n < 2) return null;
+
             var acf = Covariance(timeSeries, lagMax);
+            if (acf == null) return null;
+
             double den = acf[0, 1];
             for (int i = 0; i < acf.GetLength(0); i++)
                 acf[i, 1] /= den;
@@ -247,13 +253,14 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the partial autocorrelation of the given values.
        /// </returns>
-        private static double[,] Partial(IList<double> data, int lagMax = -1)
+        private static double[,]? Partial(IList<double> data, int lagMax = -1)
         {
             int n = data.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
             if (lagMax < 1 || n < 2) return null;
             // First compute the ACVF
             var acvf = Covariance(data, lagMax);
+            if (acvf == null) return null;
             // Then compute PACF using the Durbin-Levinson algorithm
             int i, j;
             var phis = new double[lagMax + 1];
@@ -293,13 +300,14 @@ public enum Type
        /// A n x 2 matrix, with n being the number of given input data points. The first column contains the lag and the
        /// second column contains the partial autocorrelation of the given values.
        /// </returns>
-        private static double[,] Partial(TimeSeries timeSeries, int lagMax = -1)
+        private static double[,]? Partial(TimeSeries timeSeries, int lagMax = -1)
         {
             int n = timeSeries.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
             if (lagMax < 1 || n < 2) return null;
             // First compute the ACVF
             var acvf = Covariance(timeSeries, lagMax);
+            if (acvf == null) return null;
             // Then compute PACF using the Durbin-Levinson algorithm
             int i, j;
             var phis = new double[lagMax + 1];
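Note: with the nullable return types, callers must guard before indexing, as LjungBoxTest does in HypothesisTests below. A minimal caller sketch (column 0 holds the lag, column 1 the function value):

    var data = new List<double> { 1.2, 0.8, 1.1, 0.9, 1.3, 0.7, 1.0, 1.2 };
    var acf = Autocorrelation.Function(data, lagMax: 3, type: Autocorrelation.Type.Correlation);
    if (acf != null)
    {
        for (int k = 0; k < acf.GetLength(0); k++)
            Console.WriteLine($"lag {acf[k, 0]}: r = {acf[k, 1]:F4}");
    }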
diff --git a/Numerics/Data/Statistics/Histogram.cs b/Numerics/Data/Statistics/Histogram.cs
index 1ff14e83..201ff926 100644
--- a/Numerics/Data/Statistics/Histogram.cs
+++ b/Numerics/Data/Statistics/Histogram.cs
@@ -119,8 +119,10 @@ public double Midpoint
        /// +1 if this bin is lower than the compared bin.
        /// -1 otherwise.
        /// </returns>
-        public int CompareTo(Bin other)
+        public int CompareTo(Bin? other)
         {
+            if (other is null) { return 1; }
+
             if (UpperBound > other.LowerBound && LowerBound < other.LowerBound)
             {
                 throw new ArgumentException("The bins cannot be overlapping.", nameof(other));
@@ -149,7 +151,7 @@ public object Clone()
         /// Checks whether two histogram bins are equal.
         /// </summary>
         /// <returns>True if the bins are equal and false otherwise.</returns>
-        public override bool Equals(object obj)
+        public override bool Equals(object? obj)
         {
             if (!(obj is Bin))
             {

diff --git a/Numerics/Data/Statistics/HypothesisTests.cs b/Numerics/Data/Statistics/HypothesisTests.cs
index 97d00084..6c50254b 100644
--- a/Numerics/Data/Statistics/HypothesisTests.cs
+++ b/Numerics/Data/Statistics/HypothesisTests.cs
@@ -245,6 +245,7 @@ public static double LjungBoxTest(IList<double> sample, int lagMax = -1)
             int n = sample.Count;
             if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1));
             var acf = Autocorrelation.Function(sample, lagMax, Autocorrelation.Type.Correlation);
+            if (acf == null) throw new Exception("Autocorrelation function could not be calculated.");
             double Q = 0;
             for (int k = 1; k <= lagMax; k++)
                 Q += Tools.Sqr(acf[k, 1]) / (n - k);

diff --git a/Numerics/Data/Statistics/PlottingPositions.cs b/Numerics/Data/Statistics/PlottingPositions.cs
index fac4557c..aa74f137 100644
--- a/Numerics/Data/Statistics/PlottingPositions.cs
+++ b/Numerics/Data/Statistics/PlottingPositions.cs
@@ -82,7 +82,7 @@ public static double[] Function(int N, double alpha)
         /// <param name="N">The sample size.</param>
         /// <param name="plottingPostionType">The plotting position formula type.</param>
         /// <returns>An array of plotting positions of size N.</returns>
-        public static double[] Function(int N, PlottingPositions.PlottingPostionType plottingPostionType)
+        public static double[]? Function(int N, PlottingPositions.PlottingPostionType plottingPostionType)
         {
             if (plottingPostionType == PlottingPostionType.Blom)
             {
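Note: the alpha overload above corresponds to the standard one-parameter plotting-position family. A self-contained sketch of that formula (alpha = 0 gives Weibull, 0.375 Blom, 0.44 Gringorten, 0.5 Hazen); the library's implementation may differ in details:

    // p(i) = (i - alpha) / (N + 1 - 2*alpha), for ranks i = 1..N.
    static double[] PlottingPositions(int n, double alpha)
    {
        var p = new double[n];
        for (int i = 1; i <= n; i++)
            p[i - 1] = (i - alpha) / (n + 1 - 2 * alpha);
        return p;
    }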
diff --git a/Numerics/Data/Statistics/Probability.cs b/Numerics/Data/Statistics/Probability.cs
index 9f33dbb3..77e9c4c6 100644
--- a/Numerics/Data/Statistics/Probability.cs
+++ b/Numerics/Data/Statistics/Probability.cs
@@ -184,7 +184,7 @@ public static double JointProbability(IList<double> probabilities, DependencyTyp
         /// <param name="correlationMatrix">The correlation matrix defining the dependency. Default = null.</param>
         /// <param name="dependency">The dependency type. Default = Correlation matrix.</param>
         /// <returns>The joint probability.</returns>
-        public static double JointProbability(IList<double> probabilities, int[] indicators, double[,] correlationMatrix = null, DependencyType dependency = DependencyType.CorrelationMatrix)
+        public static double JointProbability(IList<double> probabilities, int[] indicators, double[,]? correlationMatrix = null, DependencyType dependency = DependencyType.CorrelationMatrix)
         {
             if (dependency == DependencyType.CorrelationMatrix && correlationMatrix != null)
             {
@@ -312,7 +312,7 @@ public static double NegativeJointProbability(IList<double> probabilities, int[]
         /// <remarks>
         /// This method utilizes a modified version of Pandey's PCM method.
         /// </remarks>
-        public static double JointProbabilityHPCM(IList<double> probabilities, int[] indicators, double[,] correlationMatrix, double[] conditionalProbabilities = null)
+        public static double JointProbabilityHPCM(IList<double> probabilities, int[] indicators, double[,] correlationMatrix, double[]? conditionalProbabilities = null)
         {
             // Validation Checks
             if (probabilities == null || probabilities.Count == 0)
@@ -431,7 +431,7 @@ public static double JointProbabilityHPCM(IList<double> probabilities, int[] ind
         /// <returns>
         /// The joint probability of the events, adjusted for dependencies as defined by the correlation matrix. The return value is between 0 and 1.
         /// </returns>
-        public static double JointProbabilityPCM(IList<double> probabilities, int[] indicators, double[,] correlationMatrix, double[] conditionalProbabilities = null)
+        public static double JointProbabilityPCM(IList<double> probabilities, int[] indicators, double[,] correlationMatrix, double[]? conditionalProbabilities = null)
         {
             // Validation Checks
             if (probabilities == null || probabilities.Count == 0)
@@ -1899,7 +1899,7 @@ public static double CommonCauseAdjustment(IList<double> probabilities)
         /// <param name="correlationMatrix">The correlation matrix defining the dependency.</param>
         /// <param name="dependency">The dependency type. Default = Correlation matrix.</param>
         /// <returns>The common cause adjustment factor.</returns>
-        public static double CommonCauseAdjustment(IList<double> probabilities, double[,] correlationMatrix = null, DependencyType dependency = DependencyType.CorrelationMatrix)
+        public static double CommonCauseAdjustment(IList<double> probabilities, double[,]? correlationMatrix = null, DependencyType dependency = DependencyType.CorrelationMatrix)
         {
             // Validation Checks
             if (probabilities == null || probabilities.Count == 0)

diff --git a/Numerics/Data/Statistics/Statistics.cs b/Numerics/Data/Statistics/Statistics.cs
index a45dc06d..1ab3e9af 100644
--- a/Numerics/Data/Statistics/Statistics.cs
+++ b/Numerics/Data/Statistics/Statistics.cs
@@ -368,7 +368,7 @@ public static double JackKnifeStandardError(IList<double> data, Func
         /// <param name="data">Sample of data, no sorting is assumed.</param>
         /// <param name="statistic">The statistic for estimating a sample.</param>
-        public static double[] JackKnifeSample(IList<double> data, Func<IList<double>, double> statistic)
+        public static double[]? JackKnifeSample(IList<double> data, Func<IList<double>, double> statistic)
         {
             if (data == null) throw new ArgumentNullException(nameof(data));
             if (data.Count == 0) return null;
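Note: JackKnifeSample now returns null for empty input, so guard before use. Assuming the returned array holds the n leave-one-out estimates, the jackknife standard error follows as sqrt((n-1)/n * sum((theta_i - theta_bar)^2)):

    var data = new List<double> { 3.1, 2.7, 3.5, 2.9, 3.2 };
    var thetas = Statistics.JackKnifeSample(data, s => s.Average()); // statistic = mean
    if (thetas != null)
    {
        int n = thetas.Length;
        double mean = thetas.Average();
        double se = Math.Sqrt((n - 1.0) / n * thetas.Sum(t => (t - mean) * (t - mean)));
    }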
diff --git a/Numerics/Data/Time Series/Support/Series.cs b/Numerics/Data/Time Series/Support/Series.cs
index 05685a06..a78cd46f 100644
--- a/Numerics/Data/Time Series/Support/Series.cs
+++ b/Numerics/Data/Time Series/Support/Series.cs
@@ -56,7 +56,7 @@ public abstract class Series<TIndex, TValue> : IList<SeriesOrdinate<TIndex, TValue>>
         protected List<SeriesOrdinate<TIndex, TValue>> _seriesOrdinates = new List<SeriesOrdinate<TIndex, TValue>>();
 
         /// <inheritdoc/>
-        public event NotifyCollectionChangedEventHandler CollectionChanged;
+        public event NotifyCollectionChangedEventHandler? CollectionChanged;
 
         /// <inheritdoc/>
         public SeriesOrdinate<TIndex, TValue> this[int index]
         {
             get { return _seriesOrdinates[index]; }
@@ -75,11 +75,13 @@ public SeriesOrdinate<TIndex, TValue> this[int index]
         }
 
         /// <inheritdoc/>
-        object IList.this[int index]
+        object? IList.this[int index]
         {
             get { return _seriesOrdinates[index]; }
             set
             {
+                if (value is null) { throw new ArgumentNullException(nameof(value)); }
+
                 if (value.GetType() != typeof(SeriesOrdinate<TIndex, TValue>))
                 {
                     if (_seriesOrdinates[index] != (SeriesOrdinate<TIndex, TValue>)value)
@@ -109,7 +111,7 @@ object IList.this[int index]
         public bool IsFixedSize => false;
 
         /// <inheritdoc/>
-        public object SyncRoot => _seriesOrdinates.Count > 0 ? _seriesOrdinates[0] : null;
+        public object SyncRoot => _seriesOrdinates.Count > 0 ? _seriesOrdinates[0]! : new object();
 
         /// <inheritdoc/>
         public bool IsSynchronized => false;
@@ -117,14 +119,15 @@ object IList.this[int index]
         /// <inheritdoc/>
         public virtual void Add(SeriesOrdinate<TIndex, TValue> item)
         {
-            if (item == null) throw new ArgumentNullException(nameof(item));
+            if (item is null) throw new ArgumentNullException(nameof(item));
             _seriesOrdinates.Add(item);
             RaiseCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, item, _seriesOrdinates.Count - 1));
         }
 
         /// <inheritdoc/>
-        public int Add(object item)
+        public int Add(object? item)
         {
+            if (item is null) throw new ArgumentNullException(nameof(item));
             if (item.GetType() != typeof(SeriesOrdinate<TIndex, TValue>)) { return -1; }
             Add((SeriesOrdinate<TIndex, TValue>)item);
             return _seriesOrdinates.Count - 1;
@@ -133,14 +136,15 @@ public int Add(object item)
         /// <inheritdoc/>
         public virtual void Insert(int index, SeriesOrdinate<TIndex, TValue> item)
         {
-            if (item == null) throw new ArgumentNullException(nameof(item));
+            if (item is null) throw new ArgumentNullException(nameof(item));
             _seriesOrdinates.Insert(index, item);
             RaiseCollectionChanged(new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, item, index));
         }
 
         /// <inheritdoc/>
-        public void Insert(int index, object item)
+        public void Insert(int index, object? item)
         {
+            if (item is null) throw new ArgumentNullException(nameof(item));
             if (item.GetType() == typeof(SeriesOrdinate<TIndex, TValue>))
             {
                 Insert(index, (SeriesOrdinate<TIndex, TValue>)item);
@@ -160,8 +164,9 @@ public virtual bool Remove(SeriesOrdinate<TIndex, TValue> item)
         }
 
         /// <inheritdoc/>
-        public void Remove(object item)
+        public void Remove(object? item)
         {
+            if (item is null) throw new ArgumentNullException(nameof(item));
             if (item.GetType() == typeof(SeriesOrdinate<TIndex, TValue>))
             {
                 Remove((SeriesOrdinate<TIndex, TValue>)item);
@@ -195,8 +200,9 @@ public bool Contains(SeriesOrdinate<TIndex, TValue> item)
         }
 
         /// <inheritdoc/>
-        public bool Contains(object item)
+        public bool Contains(object? item)
         {
+            if (item is null) throw new ArgumentNullException(nameof(item));
             if (item.GetType() == typeof(SeriesOrdinate<TIndex, TValue>))
             {
                 return Contains((SeriesOrdinate<TIndex, TValue>)item);
@@ -226,8 +232,9 @@ public int IndexOf(SeriesOrdinate<TIndex, TValue> item)
         }
 
         /// <inheritdoc/>
-        public int IndexOf(object item)
+        public int IndexOf(object? item)
         {
+            if (item is null) throw new ArgumentNullException(nameof(item));
             if (item.GetType() == typeof(SeriesOrdinate<TIndex, TValue>))
             {
                 return _seriesOrdinates.IndexOf((SeriesOrdinate<TIndex, TValue>)item);
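Note: with the event declared nullable, the raise helper can rely on the null-conditional invoke. A sketch of what RaiseCollectionChanged may look like (the actual helper is outside the hunks shown):

    protected void RaiseCollectionChanged(NotifyCollectionChangedEventArgs e)
    {
        CollectionChanged?.Invoke(this, e); // no-op when there are no subscribers
    }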
diff --git a/Numerics/Data/Time Series/Support/SeriesOrdinate.cs b/Numerics/Data/Time Series/Support/SeriesOrdinate.cs
index 9dc8c2d8..8cbde3f1 100644
--- a/Numerics/Data/Time Series/Support/SeriesOrdinate.cs
+++ b/Numerics/Data/Time Series/Support/SeriesOrdinate.cs
@@ -50,7 +50,11 @@ public class SeriesOrdinate<TIndex, TValue> : INotifyPropertyChanged, IEquatable<SeriesOrdinate<TIndex, TValue>>
         /// <summary>
         /// Constructs a new series ordinate.
         /// </summary>
-        public SeriesOrdinate() { }
+        public SeriesOrdinate()
+        {
+            _index = default!;
+            _value = default!;
+        }
 
         /// <summary>
         /// Constructs a new series ordinate.
@@ -74,7 +78,7 @@ public SeriesOrdinate(TIndex index, TValue value)
         protected TValue _value;
 
         /// <inheritdoc/>
-        public event PropertyChangedEventHandler PropertyChanged;
+        public event PropertyChangedEventHandler? PropertyChanged;
 
         /// <summary>
         /// The index of the series ordinate.
@@ -109,7 +113,7 @@ public virtual TValue Value
         }
 
         /// <inheritdoc/>
-        public bool Equals(SeriesOrdinate<TIndex, TValue> other)
+        public bool Equals(SeriesOrdinate<TIndex, TValue>? other)
         {
             if (ReferenceEquals(other, null)) return false;
             if (ReferenceEquals(this, other)) return true;
@@ -118,7 +122,7 @@ public bool Equals(SeriesOrdinate<TIndex, TValue> other)
         }
 
         /// <inheritdoc/>
-        public override bool Equals(object obj) => Equals(obj as SeriesOrdinate<TIndex, TValue>);
+        public override bool Equals(object? obj) => Equals(obj as SeriesOrdinate<TIndex, TValue>);
 
         /// <summary>
         /// Equality operator overload.
         /// </summary>
@@ -143,8 +147,8 @@ public override int GetHashCode()
         {
             unchecked
             {
                 int hash = 17;
-                hash = hash * 23 + EqualityComparer<TIndex>.Default.GetHashCode(_index);
-                hash = hash * 23 + EqualityComparer<TValue>.Default.GetHashCode(_value);
+                hash = hash * 23 + (_index is null ? 0 : EqualityComparer<TIndex>.Default.GetHashCode(_index));
+                hash = hash * 23 + (_value is null ? 0 : EqualityComparer<TValue>.Default.GetHashCode(_value));
                 return hash;
             }
         }
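Note: in the object-typed Equals override, `obj as SeriesOrdinate<TIndex, TValue>` yields null for unrelated types, which the typed overload's ReferenceEquals check already handles; a hard cast would throw InvalidCastException instead:

    object boxed = "not an ordinate";
    var o = boxed as SeriesOrdinate<DateTime, double>; // null, so Equals returns false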
diff --git a/Numerics/Data/Time Series/Support/TimeSeriesDownload.cs b/Numerics/Data/Time Series/Support/TimeSeriesDownload.cs
index 41f3ea1b..dff08c99 100644
--- a/Numerics/Data/Time Series/Support/TimeSeriesDownload.cs
+++ b/Numerics/Data/Time Series/Support/TimeSeriesDownload.cs
@@ -173,11 +173,7 @@ public enum HeightUnit
         /// <returns>A downloaded time series.</returns>
         public static async Task<TimeSeries> FromGHCN(string siteNumber, TimeSeriesType timeSeriesType = TimeSeriesType.DailyPrecipitation, DepthUnit unit = DepthUnit.Inches)
         {
-            // Check internet connection
-            if (!await IsConnectedToInternet())
-            {
-                throw new InvalidOperationException("No internet connection.");
-            }
+
             // Check site number
             if (siteNumber.Length != 11)
@@ -195,6 +191,12 @@ public static async Task<TimeSeries> FromGHCN(string siteNumber, TimeSeriesType
             DateTime? previousDate = null;
             string tempFilePath = Path.Combine(Path.GetTempPath(), $"{siteNumber}.dly");
 
+            // Check internet connection
+            if (!await IsConnectedToInternet())
+            {
+                throw new InvalidOperationException("No internet connection.");
+            }
+
             try
             {
@@ -353,11 +355,7 @@ private static string CreateURLForUSGSDownload(string siteNumber, TimeSeriesType
         /// <param name="timeSeriesType">The time series type.</param>
         public static async Task<(TimeSeries TimeSeries, string RawText)> FromUSGS(string siteNumber, TimeSeriesType timeSeriesType = TimeSeriesType.DailyDischarge)
         {
-            // Check internet connection
-            if (!await IsConnectedToInternet())
-            {
-                throw new InvalidOperationException("No internet connection.");
-            }
+
             // Check site number
             if (siteNumber.Length != 8)
@@ -371,6 +369,12 @@ private static string CreateURLForUSGSDownload(string siteNumber, TimeSeriesType
                 throw new ArgumentException("The time series type cannot be daily precipitation or daily snow.", nameof(timeSeriesType));
             }
 
+            // Check internet connection
+            if (!await IsConnectedToInternet())
+            {
+                throw new InvalidOperationException("No internet connection.");
+            }
+
             var timeSeries = new TimeSeries();
             string textDownload = "";
@@ -403,7 +407,7 @@ private static string CreateURLForUSGSDownload(string siteNumber, TimeSeriesType
             using (GZipStream decompressionStream = new GZipStream(compressedStream, CompressionMode.Decompress))
             using (StreamReader reader = new StreamReader(decompressionStream))
             {
-                string line;
+                string? line;
                 bool isHeader = true;
 
                 while ((line = await reader.ReadLineAsync()) != null)
@@ -742,9 +746,7 @@ public static async Task<TimeSeries> FromABOM(
             DateTime? startDate = null,
             DateTime? endDate = null)
         {
-            // Check connectivity
-            if (!await IsConnectedToInternet())
-                throw new InvalidOperationException("No internet connection.");
+
             // Validate station number (BOM station numbers are typically 6 digits)
             if (string.IsNullOrWhiteSpace(stationNumber) || stationNumber.Length < 6)
@@ -754,6 +756,10 @@ public static async Task<TimeSeries> FromABOM(
             if (timeSeriesType != TimeSeriesType.DailyDischarge && timeSeriesType != TimeSeriesType.DailyStage)
                 throw new ArgumentException("BOM API supports DailyDischarge or DailyStage only.", nameof(timeSeriesType));
 
+            // Check connectivity
+            if (!await IsConnectedToInternet())
+                throw new InvalidOperationException("No internet connection.");
+
             // Set default dates
             DateTime sd = startDate ?? new DateTime(1800, 1, 1);
             DateTime ed = endDate ?? DateTime.Today;
@@ -770,7 +776,7 @@ public static async Task<TimeSeries> FromABOM(
                 $"&station_no={Uri.EscapeDataString(stationNumber)}" +
                 $"&parametertype_name={Uri.EscapeDataString(parameterType)}";
 
-            string tsId = null;
+            string? tsId = null;
 
             // Create HttpClientHandler with automatic decompression
             var handler = new HttpClientHandler
@@ -840,7 +846,7 @@ public static async Task<TimeSeries> FromABOM(
                 for (int i = 0; i < headers.GetArrayLength(); i++)
                 {
-                    string header = headers[i].GetString();
+                    string? header = headers[i].GetString();
                     if (header == "ts_id") tsIdIndex = i;
                     if (header == "ts_name") tsNameIndex = i;
                 }
@@ -852,8 +858,9 @@ public static async Task<TimeSeries> FromABOM(
                 for (int i = 1; i < root.GetArrayLength(); i++)
                 {
                     var row = root[i];
-                    string tsName = tsNameIndex >= 0 ? row[tsNameIndex].GetString() : "";
+                    string? tsName = tsNameIndex >= 0 ? row[tsNameIndex].GetString() : "";
 
+                    if (tsName == null) continue;
                     // Prioritize: DMQaQc.Merged.DailyMean.24HR or similar daily mean series
                     if (tsName.Contains("DailyMean") || tsName.Contains("Daily Mean"))
                     {
@@ -940,7 +947,7 @@ public static async Task<TimeSeries> FromABOM(
                     if (point.GetArrayLength() < 2) continue;
 
                     // Parse timestamp
-                    string timestampStr = point[0].GetString();
+                    string? timestampStr = point[0].GetString();
                     if (!DateTime.TryParse(timestampStr, out DateTime date))
                         continue;
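Note: a usage sketch for the USGS download path; the gage number is illustrative (the method requires 8 digits), and the enum qualification depends on where TimeSeriesType is declared.

    // Inside an async method: returns both the parsed series and the raw text.
    var (series, rawText) = await TimeSeriesDownload.FromUSGS(
        "01646500", timeSeriesType: TimeSeriesType.DailyDischarge);
    Console.WriteLine($"{series.Count} daily values downloaded");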
diff --git a/Numerics/Data/Time Series/TimeSeries.cs b/Numerics/Data/Time Series/TimeSeries.cs
index ec18039a..53f1bb4d 100644
--- a/Numerics/Data/Time Series/TimeSeries.cs
+++ b/Numerics/Data/Time Series/TimeSeries.cs
@@ -141,20 +141,32 @@ public TimeSeries(TimeInterval timeInterval, DateTime startDate, IList<double> d
         public TimeSeries(XElement xElement)
         {
             // Get time interval
-            if (xElement.Attribute(nameof(TimeInterval)) != null)
-                Enum.TryParse(xElement.Attribute(nameof(TimeInterval)).Value, out _timeInterval);
+            var timeIntervalAttr = xElement.Attribute(nameof(TimeInterval));
+            if (timeIntervalAttr != null)
+                Enum.TryParse(timeIntervalAttr.Value, out _timeInterval);
 
             // Get Ordinates
             foreach (XElement ordinate in xElement.Elements("SeriesOrdinate"))
             {
                 // Try to parse the invariant date string using TryParseExact
                 // If it fails, do a regular try parse.
-                DateTime index;
-                if (!DateTime.TryParseExact(ordinate.Attribute("Index").Value, "o", CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out index))
+                DateTime index = default;
+                var ordAttr = ordinate.Attribute("Index");
+                if (ordAttr != null)
                 {
-                    DateTime.TryParse(ordinate.Attribute("Index").Value, out index);
+                    if (!DateTime.TryParseExact(ordAttr.Value, "o", CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out index))
+                    {
+                        DateTime.TryParse(ordAttr.Value, out index);
+                    }
+                }
+
+                double value = 0.0;
+                var ordVal = ordinate.Attribute("Value");
+                if (ordVal != null)
+                {
+                    double.TryParse(ordVal.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out value);
                 }
-                double.TryParse(ordinate.Attribute("Value").Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var value);
+
                 Add(new SeriesOrdinate<DateTime, double>(index, value));
             }
         }
@@ -1115,7 +1127,7 @@ public TimeSeries ClipTimeSeries(DateTime startDate, DateTime endDate)
         /// <param name="timeInterval">The new time interval.</param>
         /// <param name="average">Optional. Determines if values should be averaged (true) or cumulated (false) for larger time steps. Default = true.</param>
         /// <returns>A new TimeSeries object with the new interval.</returns>
-        public TimeSeries ConvertTimeInterval(TimeInterval timeInterval, bool average = true)
+        public TimeSeries? ConvertTimeInterval(TimeInterval timeInterval, bool average = true)
         {
             var TS = TimeSeries.TimeIntervalInHours(TimeInterval); // The time step in hours
             var newTS = TimeSeries.TimeIntervalInHours(timeInterval); // The new time step in hours
@@ -1467,7 +1479,7 @@ public TimeSeries CalendarYearSeries(BlockFunctionType blockFunction = BlockFunc
             var result = new TimeSeries(TimeInterval.Irregular);
 
             // First, perform smoothing function
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -1486,6 +1498,7 @@ public TimeSeries CalendarYearSeries(BlockFunctionType blockFunction = BlockFunc
             }
 
             // Then, perform block function
+            if (smoothedSeries == null) return result;
             for (int i = smoothedSeries.StartDate.Year; i <= smoothedSeries.EndDate.Year; i++)
             {
                 var blockData = smoothedSeries.Where(x => x.Index.Year == i).ToList();
@@ -1561,7 +1574,7 @@ public TimeSeries CustomYearSeries(int startMonth = 10, BlockFunctionType blockF
             var result = new TimeSeries(TimeInterval.Irregular);
 
             // First, perform smoothing function
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -1580,6 +1593,7 @@ public TimeSeries CustomYearSeries(int startMonth = 10, BlockFunctionType blockF
             }
 
             // Then, shift the dates
+            if (smoothedSeries == null) return result;
             int shift = startMonth != 1 ? 12 - startMonth + 1 : 0;
             smoothedSeries = startMonth != 1 ? smoothedSeries.ShiftDatesByMonth(shift) : smoothedSeries;
@@ -1667,7 +1681,7 @@ public TimeSeries CustomYearSeries(int startMonth, int endMonth, BlockFunctionTy
             var result = new TimeSeries(TimeInterval.Irregular);
 
             // First, perform smoothing function
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -1686,6 +1700,7 @@ public TimeSeries CustomYearSeries(int startMonth, int endMonth, BlockFunctionTy
             }
 
             // Then, perform block function
+            if (smoothedSeries == null) return result;
             for (int i = smoothedSeries.StartDate.Year; i <= smoothedSeries.EndDate.Year; i++)
             {
@@ -1779,7 +1794,7 @@ public TimeSeries MonthlySeries(BlockFunctionType blockFunction = BlockFunctionT
             var result = new TimeSeries(TimeInterval.Irregular);
 
             // Create smoothed series
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -1797,6 +1812,7 @@ public TimeSeries MonthlySeries(BlockFunctionType blockFunction = BlockFunctionT
                 smoothedSeries = Difference(period);
             }
 
+            if (smoothedSeries == null) return result;
             for (int i = smoothedSeries.StartDate.Year; i <= smoothedSeries.EndDate.Year; i++)
             {
@@ -1875,7 +1891,7 @@ public TimeSeries QuarterlySeries(BlockFunctionType blockFunction = BlockFunctio
             var result = new TimeSeries(TimeInterval.Irregular);
 
             // Create smoothed series
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -1893,6 +1909,7 @@ public TimeSeries QuarterlySeries(BlockFunctionType blockFunction = BlockFunctio
                 smoothedSeries = Difference(period);
             }
 
+            if (smoothedSeries == null) return result;
             for (int i = smoothedSeries.StartDate.Year; i <= smoothedSeries.EndDate.Year; i++)
             {
@@ -1987,7 +2004,7 @@ public TimeSeries QuarterlySeries(BlockFunctionType blockFunction = BlockFunctio
         public TimeSeries PeaksOverThresholdSeries(double threshold, int minStepsBetweenEvents = 1, SmoothingFunctionType smoothingFunction = SmoothingFunctionType.None, int period = 1)
         {
             // Create smoothed time series
-            TimeSeries smoothedSeries = null;
+            TimeSeries? smoothedSeries = null;
             if (smoothingFunction == SmoothingFunctionType.None)
             {
                 smoothedSeries = Clone();
@@ -2009,6 +2026,7 @@ public TimeSeries PeaksOverThresholdSeries(double threshold, int minStepsBetween
             int i = 0, idx, idxMax;
             var clusters = new List();
 
+            if (smoothedSeries == null) return new TimeSeries(TimeInterval.Irregular);
             while (i < smoothedSeries.Count)
             {
                 if (!double.IsNaN(smoothedSeries[i].Value) && smoothedSeries[i].Value > threshold)
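Note: ConvertTimeInterval now returns null when the conversion cannot be performed, so callers should guard; the interval member name below is a placeholder, as the TimeInterval enum values are not shown in this diff.

    TimeSeries? daily = hourlySeries.ConvertTimeInterval(TimeInterval.OneDay, average: true);
    if (daily != null)
    {
        // work with the converted series
    }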
diff --git a/Numerics/Distributions/Bivariate Copulas/AMHCopula.cs b/Numerics/Distributions/Bivariate Copulas/AMHCopula.cs
index e1239d57..3b6ab3da 100644
--- a/Numerics/Distributions/Bivariate Copulas/AMHCopula.cs
+++ b/Numerics/Distributions/Bivariate Copulas/AMHCopula.cs
@@ -122,7 +122,7 @@ public override ArgumentOutOfRangeException ValidateParameter(double parameter,
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
                 return new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>

diff --git a/Numerics/Distributions/Bivariate Copulas/Base/ArchimedeanCopula.cs b/Numerics/Distributions/Bivariate Copulas/Base/ArchimedeanCopula.cs
index 6bd55b8e..5b23539a 100644
--- a/Numerics/Distributions/Bivariate Copulas/Base/ArchimedeanCopula.cs
+++ b/Numerics/Distributions/Bivariate Copulas/Base/ArchimedeanCopula.cs
@@ -78,7 +78,7 @@ public override ArgumentOutOfRangeException ValidateParameter(double parameter,
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
                 return new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>

diff --git a/Numerics/Distributions/Bivariate Copulas/Base/BivariateCopula.cs b/Numerics/Distributions/Bivariate Copulas/Base/BivariateCopula.cs
index 4015be24..60944869 100644
--- a/Numerics/Distributions/Bivariate Copulas/Base/BivariateCopula.cs
+++ b/Numerics/Distributions/Bivariate Copulas/Base/BivariateCopula.cs
@@ -108,10 +108,10 @@ public bool ParametersValid
     }
 
     /// <inheritdoc/>
-    public virtual IUnivariateDistribution MarginalDistributionX { get; set; }
+    public virtual IUnivariateDistribution MarginalDistributionX { get; set; } = null!;
 
     /// <inheritdoc/>
-    public virtual IUnivariateDistribution MarginalDistributionY { get; set; }
+    public virtual IUnivariateDistribution MarginalDistributionY { get; set; } = null!;
 
     /// <inheritdoc/>
     public abstract string DisplayName { get; }

diff --git a/Numerics/Distributions/Bivariate Copulas/FrankCopula.cs b/Numerics/Distributions/Bivariate Copulas/FrankCopula.cs
index 6e0185fa..0b6a06a6 100644
--- a/Numerics/Distributions/Bivariate Copulas/FrankCopula.cs
+++ b/Numerics/Distributions/Bivariate Copulas/FrankCopula.cs
@@ -121,7 +121,7 @@ public override ArgumentOutOfRangeException ValidateParameter(double parameter,
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
                 return new ArgumentOutOfRangeException(nameof(Theta), "The dependency parameter θ (theta) must be less than or equal to " + ThetaMaximum.ToString() + ".");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Bivariate Copulas/NormalCopula.cs b/Numerics/Distributions/Bivariate Copulas/NormalCopula.cs
index 4eaaf0c0..7215ba75 100644
--- a/Numerics/Distributions/Bivariate Copulas/NormalCopula.cs
+++ b/Numerics/Distributions/Bivariate Copulas/NormalCopula.cs
@@ -137,7 +137,7 @@ public override ArgumentOutOfRangeException ValidateParameter(double parameter,
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Theta), "The correlation parameter ρ (rho) must be less than " + ThetaMaximum.ToString() + ".");
                 return new ArgumentOutOfRangeException(nameof(Theta), "The correlation parameter ρ (rho) must be less than " + ThetaMaximum.ToString() + ".");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
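Note: all four copula validators keep the non-nullable ArgumentOutOfRangeException return type from the base class, which forces the `null!` escape hatch in the valid case. A sketch of the alternative contract, if the base signature were made nullable (not what this changeset does):

    public ArgumentOutOfRangeException? ValidateParameter(double theta, bool throwException)
    {
        if (theta > ThetaMaximum)
        {
            var ex = new ArgumentOutOfRangeException(nameof(theta),
                "The dependency parameter must be less than or equal to the maximum.");
            if (throwException) throw ex;
            return ex;
        }
        return null; // null unambiguously means "valid"
    }
    // Caller: bool ok = copula.ValidateParameter(theta, false) is null;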
diff --git a/Numerics/Distributions/Multivariate/BivariateEmpirical.cs b/Numerics/Distributions/Multivariate/BivariateEmpirical.cs
index 8b334842..7a6e1d63 100644
--- a/Numerics/Distributions/Multivariate/BivariateEmpirical.cs
+++ b/Numerics/Distributions/Multivariate/BivariateEmpirical.cs
@@ -60,6 +60,9 @@ public BivariateEmpirical(Transform x1Transform = Transform.None, Transform x2Tr
             X1Transform = x1Transform;
             X2Transform = x2Transform;
             ProbabilityTransform = probabilityTransform;
+            X1Values = Array.Empty<double>();
+            X2Values = Array.Empty<double>();
+            ProbabilityValues = new double[0, 0];
         }
 
         /// <summary>
@@ -84,26 +87,26 @@ public BivariateEmpirical(IList<double> x1Values, IList<double> x2Values, double
         // ...
         // X1n   P(n,1)   P(n,n)
 
-        private Bilinear bilinear = null;
+        private Bilinear? bilinear = null;
         private bool _parametersValid = true;
 
         /// <summary>
         /// Return the array of X1 values (distribution 1). Points on the cumulative curve are specified
         /// with increasing value and increasing probability.
         /// </summary>
-        public double[] X1Values { get; private set; }
+        public double[] X1Values { get; private set; } = Array.Empty<double>();
 
         /// <summary>
         /// Return the array of X2 values (distribution 2). Points on the cumulative curve are specified
         /// with increasing value and increasing probability.
         /// </summary>
-        public double[] X2Values { get; private set; }
+        public double[] X2Values { get; private set; } = Array.Empty<double>();
 
         /// <summary>
         /// Return the array of probability values. Points on the cumulative curve are specified
         /// with increasing value and increasing probability.
         /// </summary>
-        public double[,] ProbabilityValues { get; private set; }
+        public double[,] ProbabilityValues { get; private set; } = new double[0, 0];
 
         /// <summary>
         /// Determines the interpolation transform for the X1-values.
@@ -183,7 +186,7 @@ public void SetParameters(IList<double> x1Values, IList<double> x2Values, double
         /// <param name="x2Values">Array of X2 values. The X2-values represent the secondary values. There are n columns in the table of probability values.</param>
         /// <param name="pValues">Array of probability values. Range 0 ≤ p ≤ 1.</param>
         /// <param name="throwException">Determines whether to throw an exception or not.</param>
-        public ArgumentOutOfRangeException ValidateParameters(IList<double> x1Values, IList<double> x2Values, double[,] pValues, bool throwException)
+        public ArgumentOutOfRangeException? ValidateParameters(IList<double> x1Values, IList<double> x2Values, double[,] pValues, bool throwException)
         {
             if (x1Values.Count < 2)
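Note: the probability table is interpolated over the (X1, X2) grid via the Bilinear field above. For reference, this is the textbook bilinear formula (a self-contained sketch, not the library's Bilinear class):

    // Interpolate at (x, y) inside the cell [xa, xb] x [ya, yb] with corner values
    // q11 = f(xa, ya), q21 = f(xb, ya), q12 = f(xa, yb), q22 = f(xb, yb).
    static double Bilerp(double x, double y, double xa, double xb, double ya, double yb,
                         double q11, double q21, double q12, double q22)
    {
        double tx = (x - xa) / (xb - xa);
        double ty = (y - ya) / (yb - ya);
        double lower = q11 + tx * (q21 - q11); // along x at ya
        double upper = q12 + tx * (q22 - q12); // along x at yb
        return lower + ty * (upper - lower);   // then along y
    }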
diff --git a/Numerics/Distributions/Multivariate/MultivariateNormal.cs b/Numerics/Distributions/Multivariate/MultivariateNormal.cs
index 841a95d0..dbdcad75 100644
--- a/Numerics/Distributions/Multivariate/MultivariateNormal.cs
+++ b/Numerics/Distributions/Multivariate/MultivariateNormal.cs
@@ -89,24 +89,24 @@ public MultivariateNormal(double[] mean, double[,] covariance)
         private bool _parametersValid = true;
         private int _dimension = 0;
-        private double[] _mean;
-        private Matrix _covariance;
+        private double[] _mean = Array.Empty<double>();
+        private Matrix _covariance = Matrix.Identity(0);
 
-        private CholeskyDecomposition _cholesky;
+        private CholeskyDecomposition? _cholesky;
         private double _lnconstant;
-        private double[] _variance;
-        private double[] _standardDeviation;
+        private double[] _variance = Array.Empty<double>();
+        private double[] _standardDeviation = Array.Empty<double>();
 
         // variables required for the multivariate CDF
-        private Matrix _correlation;
-        private double[] _correl;
+        private Matrix _correlation = Matrix.Identity(0);
+        private double[] _correl = Array.Empty<double>();
         private Random _MVNUNI = new MersenneTwister();
         private int _maxEvaluations = 100000;
         private double _absoluteError = 1E-4;
         private double _relativeError = 1E-4;
-        private double[] _lower;
-        private double[] _upper;
-        private int[] _infin;
+        private double[] _lower = Array.Empty<double>();
+        private double[] _upper = Array.Empty<double>();
+        private int[] _infin = Array.Empty<int>();
         private bool _correlationMatrixCreated = false;
         private bool _covSRTed = false;
@@ -250,7 +250,7 @@ public double[] StandardDeviation
         /// <summary>
         /// Determines if the covariance matrix is positive definite.
         /// </summary>
-        public bool IsPositiveDefinite => _cholesky.IsPositiveDefinite;
+        public bool IsPositiveDefinite => _cholesky != null && _cholesky.IsPositiveDefinite;
 
         /// <summary>
         /// Set the distribution parameters.
@@ -265,6 +267,16 @@ public void SetParameters(double[] mean, double[,] covariance)
             _dimension = mean.Length;
             _mean = mean;
             _covariance = new Matrix(covariance);
+
+            _variance = new double[_dimension];
+            _standardDeviation = new double[_dimension];
+            for (int i = 0; i < _dimension; i++)
+            {
+                // assuming Matrix supports indexer [row,col]
+                _variance[i] = _covariance[i, i];
+                _standardDeviation[i] = Math.Sqrt(_variance[i]);
+            }
+
             _cholesky = new CholeskyDecomposition(_covariance);
             double lndet = _cholesky.LogDeterminant();
             _lnconstant = -(Math.Log(2d * Math.PI) * _mean.Length + lndet) * 0.5d;
@@ -349,7 +361,7 @@ public ArgumentOutOfRangeException ValidateParameters(double[] mean, double[,] c
                 var ex = new ArgumentOutOfRangeException(nameof(Covariance), "Covariance matrix is not positive-definite.");
                 if (throwException) throw ex; else return ex;
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -384,6 +396,8 @@ public double Mahalanobis(double[] x)
             var z = new double[_mean.Length];
             for (int i = 0; i < x.Length; i++)
                 z[i] = x[i] - _mean[i];
+            if (_cholesky == null)
+                throw new InvalidOperationException("Parameters not set.");
             var a = _cholesky.Solve(new Vector(z));
             double b = 0d;
             for (int i = 0; i < z.Length; i++)
@@ -475,7 +489,9 @@ public double[] InverseCDF(double[] probabilities)
             var z = new double[Dimension];
             for (int j = 0; j < Dimension; j++)
                 z[j] = Normal.StandardZ(probabilities[j]);
-            // x = A*z + mu
+
+            if (_cholesky == null)
+                throw new InvalidOperationException("Parameters not set.");
             var Az = _cholesky.L * z;
             for (int j = 0; j < Dimension; j++)
                 sample[j] = Az[j] + _mean[j];
@@ -538,6 +554,8 @@ public static MultivariateNormal Bivariate(double mu1, double mu2, double sigma1
                 for (int j = 0; j < Dimension; j++)
                     z[j] = Normal.StandardZ(rnd.NextDouble());
                 // x = A*z + mu
+                if (_cholesky == null)
+                    throw new InvalidOperationException("Parameters not set.");
                 var Az = _cholesky.L * z;
                 for (int j = 0; j < Dimension; j++)
                     sample[i, j] = Az[j] + _mean[j];
@@ -566,6 +584,8 @@ public static MultivariateNormal Bivariate(double mu1, double mu2, double sigma1
                 for (int j = 0; j < Dimension; j++)
                     z[j] = Normal.StandardZ(r[i, j]);
                 // x = A*z + mu
+                if (_cholesky == null)
+                    throw new InvalidOperationException("Parameters not set.");
                 var Az = _cholesky.L * z;
                 for (int j = 0; j < Dimension; j++)
                     sample[i, j] = Az[j] + _mean[j];
@@ -601,6 +621,8 @@ public static MultivariateNormal Bivariate(double mu1, double mu2, double sigma1
                     }
                 }
                 // x = A*z + mu
+                if (_cholesky == null)
+                    throw new InvalidOperationException("Parameters not set.");
                 var Az = _cholesky.L * z;
                 for (int j = 0; j < Dimension; j++)
                     sample[i, j] = Az[j] + _mean[j];
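Note: the repeated `x = A*z + mu` step above draws correlated samples from independent standard normals, where A is the lower Cholesky factor of the covariance matrix. A self-contained sketch of that step (plain arrays instead of the library's Matrix/Vector types):

    static double[] SampleMvn(double[] mu, double[,] lowerL, Func<double> stdNormal)
    {
        int d = mu.Length;
        var z = new double[d];
        for (int j = 0; j < d; j++) z[j] = stdNormal(); // independent N(0,1) draws
        var x = new double[d];
        for (int i = 0; i < d; i++)
        {
            double s = 0.0;
            for (int j = 0; j <= i; j++) s += lowerL[i, j] * z[j]; // A*z (lower triangular)
            x[i] = s + mu[i];
        }
        return x;
    }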
diff --git a/Numerics/Distributions/Univariate/Base/UnivariateDistributionBase.cs b/Numerics/Distributions/Univariate/Base/UnivariateDistributionBase.cs
index 1a5cc312..08b96017 100644
--- a/Numerics/Distributions/Univariate/Base/UnivariateDistributionBase.cs
+++ b/Numerics/Distributions/Univariate/Base/UnivariateDistributionBase.cs
@@ -816,7 +816,7 @@ public virtual XElement ToXElement()
         /// <param name="obj">The object to compare with the current object.</param>
         /// <returns>True if the specified object is equal to the current object; otherwise, False.</returns>
-        public override bool Equals(object obj)
+        public override bool Equals(object? obj)
         {
             if (obj is UnivariateDistributionBase other)
             {
@@ -839,7 +839,7 @@ public override int GetHashCode()
             if (Type == UnivariateDistributionType.Empirical)
             {
                 var empirical = this as EmpiricalDistribution;
-                if (empirical != null)
+                if (empirical is not null)
                 {
                     foreach (var x in empirical.XValues)
                     {
@@ -871,9 +871,9 @@ public override int GetHashCode()
         /// <returns>
         /// A negative number if this instance's mean is less than the other; zero if equal; a positive number if greater.
         /// </returns>
-        public int CompareTo(UnivariateDistributionBase other)
+        public int CompareTo(UnivariateDistributionBase? other)
         {
-            if (other == null)
+            if (other is null)
                 return 1; // non-null instance is considered greater than null
 
             return this.Mean.CompareTo(other.Mean);
diff --git a/Numerics/Distributions/Univariate/Base/UnivariateDistributionFactory.cs b/Numerics/Distributions/Univariate/Base/UnivariateDistributionFactory.cs
index 91a79eab..84570380 100644
--- a/Numerics/Distributions/Univariate/Base/UnivariateDistributionFactory.cs
+++ b/Numerics/Distributions/Univariate/Base/UnivariateDistributionFactory.cs
@@ -204,7 +204,10 @@ public static UnivariateDistributionBase CreateDistribution(UnivariateDistributi
             {
                 distribution = new Weibull();
             }
-
+            if (distribution is null)
+            {
+                throw new ArgumentException("Distribution is not found.");
+            }
             return distribution;
         }
 
@@ -215,13 +218,14 @@ public static UnivariateDistributionBase CreateDistribution(UnivariateDistributi
         /// <returns>
         /// A univariate distribution.
         /// </returns>
-        public static UnivariateDistributionBase CreateDistribution(XElement xElement)
+        public static UnivariateDistributionBase? CreateDistribution(XElement xElement)
         {
             UnivariateDistributionType type;
-            UnivariateDistributionBase dist = null;
-            if (xElement.Attribute(nameof(UnivariateDistributionBase.Type)) != null)
+            UnivariateDistributionBase? dist = null;
+            var xAttr = xElement.Attribute(nameof(UnivariateDistributionBase.Type));
+            if (xAttr != null)
             {
-                Enum.TryParse(xElement.Attribute(nameof(UnivariateDistributionBase.Type)).Value, out type);
+                Enum.TryParse(xAttr.Value, out type);
                 if (type == UnivariateDistributionType.Mixture)
                 {
@@ -249,14 +253,19 @@ public static UnivariateDistributionBase CreateDistribution(XElement xElement)
                 }
             }
 
+            if (dist is null)
+            {
+                throw new ArgumentException("Distribution is not found.");
+            }
             var names = dist.GetParameterPropertyNames;
             var parms = dist.GetParameters;
             var vals = new double[dist.NumberOfParameters];
             for (int i = 0; i < dist.NumberOfParameters; i++)
             {
-                if (xElement.Attribute(names[i]) != null)
+                var xAttrParm = xElement.Attribute(names[i]);
+                if (xAttrParm != null)
                 {
-                    double.TryParse(xElement.Attribute(names[i]).Value, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture, out vals[i]);
+                    double.TryParse(xAttrParm.Value, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture, out vals[i]);
                 }
             }
             dist.SetParameters(vals);
diff --git a/Numerics/Distributions/Univariate/Bernoulli.cs b/Numerics/Distributions/Univariate/Bernoulli.cs
index 68ed1473..72d9268e 100644
--- a/Numerics/Distributions/Univariate/Bernoulli.cs
+++ b/Numerics/Distributions/Univariate/Bernoulli.cs
@@ -225,7 +225,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Probability), "Probability must be between 0 and 1.");
                 return new ArgumentOutOfRangeException(nameof(Probability), "Probability must be between 0 and 1.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/BetaDistribution.cs b/Numerics/Distributions/Univariate/BetaDistribution.cs
index f09d1ba3..6cb56ba6 100644
--- a/Numerics/Distributions/Univariate/BetaDistribution.cs
+++ b/Numerics/Distributions/Univariate/BetaDistribution.cs
@@ -255,7 +255,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                 return new ArgumentOutOfRangeException(nameof(Beta), "The shape parameter β (beta) must be positive.");
             }
             //
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Binomial.cs b/Numerics/Distributions/Univariate/Binomial.cs
index baa238d7..9ffc70cd 100644
--- a/Numerics/Distributions/Univariate/Binomial.cs
+++ b/Numerics/Distributions/Univariate/Binomial.cs
@@ -256,7 +256,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(ProbabilityOfSuccess), "The number of trials (n) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(ProbabilityOfSuccess), "The number of trials (n) must be positive.");
             }
-            return null;
+            return null!;
        }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Cauchy.cs b/Numerics/Distributions/Univariate/Cauchy.cs
index 836784b1..639c725a 100644
--- a/Numerics/Distributions/Univariate/Cauchy.cs
+++ b/Numerics/Distributions/Univariate/Cauchy.cs
@@ -230,7 +230,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Gamma), "The scale parameter γ (gamma) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Gamma), "The scale parameter γ (gamma) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
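
The factory's parameter loop above uses the hoisted-attribute pattern that recurs throughout this patch: fetch the attribute once, null-check the handle, then TryParse its value with the invariant culture. A standalone sketch of the pattern (element and attribute names are illustrative only):

    using System;
    using System.Globalization;
    using System.Xml.Linq;

    // Sketch of the hoisted-attribute pattern: one lookup, one null check, one TryParse.
    static class AttributeGuardSketch
    {
        static void Main()
        {
            var el = XElement.Parse("<Distribution Alpha=\"2.5\" />");

            double alpha = 0;
            var alphaAttr = el.Attribute("Alpha");   // single lookup, no repeated el.Attribute(...) calls
            if (alphaAttr != null)
                double.TryParse(alphaAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out alpha);

            Console.WriteLine(alpha); // 2.5
        }
    }
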
diff --git a/Numerics/Distributions/Univariate/ChiSquared.cs b/Numerics/Distributions/Univariate/ChiSquared.cs
index 6a080b89..1f6cdaa7 100644
--- a/Numerics/Distributions/Univariate/ChiSquared.cs
+++ b/Numerics/Distributions/Univariate/ChiSquared.cs
@@ -242,7 +242,7 @@ public ArgumentOutOfRangeException ValidateParameters(int degreesOfFreedom, bool
                 return new ArgumentOutOfRangeException(nameof(degreesOfFreedom), "The degrees of freedom ν (nu) must greater than or equal to one.");
             }
 
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/CompetingRisks.cs b/Numerics/Distributions/Univariate/CompetingRisks.cs
index 8a713821..e0ec9b98 100644
--- a/Numerics/Distributions/Univariate/CompetingRisks.cs
+++ b/Numerics/Distributions/Univariate/CompetingRisks.cs
@@ -79,14 +79,14 @@ public CompetingRisks(IUnivariateDistribution[] distributions)
             SetParameters(distributions);
         }
 
-        private UnivariateDistributionBase[] _distributions;
-        private EmpiricalDistribution _empiricalCDF;
+        private UnivariateDistributionBase[] _distributions = Array.Empty<UnivariateDistributionBase>();
+        private EmpiricalDistribution _empiricalCDF = null!;
         private bool _momentsComputed = false;
         private double u1, u2, u3, u4;
         private bool _empiricalCDFCreated = false;
-        private double[,] _correlationMatrix;
+        private double[,] _correlationMatrix = null!;
         private bool _mvnCreated = false;
-        private MultivariateNormal _mvn;
+        private MultivariateNormal _mvn = null!;
 
         /// <summary>
         /// Returns the array of univariate probability distributions.
@@ -422,7 +422,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     return new ArgumentOutOfRangeException(nameof(Distributions), "One of the distributions have invalid parameters.");
                 }
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -620,7 +620,7 @@ public override double InverseCDF(double probability)
         /// Returns a list of cumulative incidence functions.
         /// </summary>
         /// <param name="bins">Optional. The stratification bins to integrate over. Default is 200 bins.</param>
-        public List<EmpiricalDistribution> CumulativeIncidenceFunctions(List<StratificationBin> bins = null)
+        public List<EmpiricalDistribution> CumulativeIncidenceFunctions(List<StratificationBin>? bins = null)
         {
             // Get stratification bins
             if (bins == null)
@@ -928,7 +928,7 @@ public override UnivariateDistributionBase Clone()
                 ProbabilityTransform = ProbabilityTransform
             };
             if (CorrelationMatrix != null)
-                cr.CorrelationMatrix = CorrelationMatrix.Clone() as double[,];
+                cr.CorrelationMatrix = (double[,]) CorrelationMatrix.Clone();
             return cr;
         }
 
@@ -989,20 +989,22 @@ public override XElement ToXElement()
         /// </summary>
         /// <param name="xElement">The XElement to deserialize.</param>
         /// <returns>A new competing risks distribution.</returns>
-        public static CompetingRisks FromXElement(XElement xElement)
+        public static CompetingRisks? FromXElement(XElement xElement)
         {
             UnivariateDistributionType type = UnivariateDistributionType.Deterministic;
-            if (xElement.Attribute(nameof(UnivariateDistributionBase.Type)) != null)
+            var xElAttr = xElement.Attribute(nameof(UnivariateDistributionBase.Type));
+            if (xElAttr != null)
             {
-                Enum.TryParse(xElement.Attribute(nameof(UnivariateDistributionBase.Type)).Value, out type);
+                Enum.TryParse(xElAttr.Value, out type);
             }
             if (type == UnivariateDistributionType.CompetingRisks)
             {
                 var distributions = new List<UnivariateDistributionBase>();
-                if (xElement.Attribute(nameof(Distributions)) != null)
+                var xDistAttr = xElement.Attribute(nameof(Distributions));
+                if (xDistAttr != null)
                 {
-                    var types = xElement.Attribute(nameof(Distributions)).Value.Split('|');
+                    var types = xDistAttr.Value.Split('|');
                     for (int i = 0; i < types.Length; i++)
                     {
                         Enum.TryParse(types[i], out UnivariateDistributionType distType);
@@ -1013,29 +1015,46 @@ public static CompetingRisks FromXElement(XElement xElement)
 
                 if (xElement.Attribute(nameof(XTransform)) != null)
                 {
-                    Enum.TryParse(xElement.Attribute(nameof(XTransform)).Value, out Transform xTransform);
-                    competingRisks.XTransform = xTransform;
+                    var xTransformAttr = xElement.Attribute(nameof(XTransform));
+                    if (xTransformAttr != null)
+                    {
+                        Enum.TryParse(xTransformAttr.Value, out Transform xTransform);
+                        competingRisks.XTransform = xTransform;
+                    }
                 }
                 if (xElement.Attribute(nameof(ProbabilityTransform)) != null)
                 {
-                    Enum.TryParse(xElement.Attribute(nameof(ProbabilityTransform)).Value, out Transform probabilityTransform);
-                    competingRisks.ProbabilityTransform = probabilityTransform;
+                    var xProbabilityAttr = xElement.Attribute(nameof(ProbabilityTransform));
+                    if (xProbabilityAttr != null)
+                    {
+                        Enum.TryParse(xProbabilityAttr.Value, out Transform probabilityTransform);
+                        competingRisks.ProbabilityTransform = probabilityTransform;
+                    }
                 }
                 if (xElement.Attribute(nameof(MinimumOfRandomVariables)) != null)
                 {
-                    bool.TryParse(xElement.Attribute(nameof(MinimumOfRandomVariables)).Value, out bool minOfValues);
-                    competingRisks.MinimumOfRandomVariables = minOfValues;
+                    var xMinOfAttr = xElement.Attribute(nameof(MinimumOfRandomVariables));
+                    if (xMinOfAttr != null)
+                    {
+                        bool.TryParse(xMinOfAttr.Value, out bool minOfValues);
+                        competingRisks.MinimumOfRandomVariables = minOfValues;
+                    }
                 }
                 if (xElement.Attribute(nameof(Dependency)) != null)
                 {
-                    Enum.TryParse(xElement.Attribute(nameof(Dependency)).Value, out Probability.DependencyType dependency);
-                    competingRisks.Dependency = dependency;
+                    var xDependencyAttr = xElement.Attribute(nameof(Dependency));
+                    if (xDependencyAttr != null)
+                    {
+                        Enum.TryParse(xDependencyAttr.Value, out Probability.DependencyType dependency);
+                        competingRisks.Dependency = dependency;
+                    }
                 }
                 // Parameters
-                if (xElement.Attribute("Parameters") != null)
-                {
-                    var vals = xElement.Attribute("Parameters").Value.Split('|');
+                var xParametersAttr = xElement.Attribute("Parameters");
+                if (xParametersAttr != null)
+                {
+                    var vals = xParametersAttr.Value.Split('|');
                     var parameters = new List<double>();
                     for (int i = 0; i < vals.Length; i++)
                     {
diff --git a/Numerics/Distributions/Univariate/Deterministic.cs b/Numerics/Distributions/Univariate/Deterministic.cs
index 77bd7da7..99cdad1e 100644
--- a/Numerics/Distributions/Univariate/Deterministic.cs
+++ b/Numerics/Distributions/Univariate/Deterministic.cs
@@ -238,7 +238,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Probability), "The point value must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Probability), "The point value must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/EmpiricalDistribution.cs b/Numerics/Distributions/Univariate/EmpiricalDistribution.cs
index 4627d6dc..af05b7e9 100644
--- a/Numerics/Distributions/Univariate/EmpiricalDistribution.cs
+++ b/Numerics/Distributions/Univariate/EmpiricalDistribution.cs
@@ -155,14 +155,19 @@ public EmpiricalDistribution(IList<double> sample, PlottingPositions.PlottingPos
         {
             _xValues = sample.ToArray();
             Array.Sort(_xValues);
-            _pValues = PlottingPositions.Function(_xValues.Count(), plottingPostionType);
+
+            var pValues = PlottingPositions.Function(_xValues.Count(), plottingPostionType);
+
+            if (pValues is null) { throw new InvalidOperationException("PlottingPositions.Function returned null."); }
+            _pValues = pValues;
+
             opd = new OrderedPairedData(_xValues, _pValues, true, SortOrder.Ascending, true, SortOrder.Ascending);
             _momentsComputed = false;
         }
 
-        private double[] _xValues;
-        private double[] _pValues;
-        private OrderedPairedData opd;
+        private double[] _xValues = Array.Empty<double>();
+        private double[] _pValues = Array.Empty<double>();
+        private OrderedPairedData opd = default!;
         private bool _momentsComputed = false;
         private double u1, u2, u3, u4;
 
@@ -407,7 +412,7 @@ public override void SetParameters(IList<double> parameters)
         /// <inheritdoc/>
         public override ArgumentOutOfRangeException ValidateParameters(IList<double> parameters, bool throwException)
         {
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -699,7 +704,7 @@ public static EmpiricalDistribution Convolve(IList<EmpiricalDistribution> distri
                 throw new ArgumentException("Distribution list cannot be null or empty.", nameof(distributions));
 
             if (distributions.Count == 1)
-                return distributions[0].Clone() as EmpiricalDistribution;
+                return (EmpiricalDistribution) distributions[0].Clone();
 
             if (numberOfPoints < 2)
                 throw new ArgumentException("Number of points must be at least 2.", nameof(numberOfPoints));
diff --git a/Numerics/Distributions/Univariate/Exponential.cs b/Numerics/Distributions/Univariate/Exponential.cs
index 5dbc109d..7bf019d1 100644
--- a/Numerics/Distributions/Univariate/Exponential.cs
+++ b/Numerics/Distributions/Univariate/Exponential.cs
@@ -285,7 +285,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/GammaDistribution.cs b/Numerics/Distributions/Univariate/GammaDistribution.cs
index 6e23df39..5b8419a0 100644
--- a/Numerics/Distributions/Univariate/GammaDistribution.cs
+++ b/Numerics/Distributions/Univariate/GammaDistribution.cs
@@ -389,7 +389,7 @@ public ArgumentOutOfRangeException ValidateParameters(double scale, double shape
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
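
The new guard above treats a null result from PlottingPositions.Function as a hard error. For orientation, plotting positions map sorted sample ranks to non-exceedance probabilities; a standalone sketch of one common choice, the Weibull formula p_i = i/(n+1), is shown here as an illustration rather than as the library's default:

    using System;
    using System.Linq;

    // Standalone sketch of Weibull plotting positions, p_i = i / (n + 1).
    static class PlottingPositionSketch
    {
        static double[] Weibull(int n) =>
            Enumerable.Range(1, n).Select(i => i / (n + 1.0)).ToArray();

        static void Main()
        {
            var sample = new[] { 3.2, 1.1, 7.8, 5.0 };
            Array.Sort(sample);                 // empirical CDF requires sorted x-values
            var p = Weibull(sample.Length);     // 0.2, 0.4, 0.6, 0.8
            for (int i = 0; i < sample.Length; i++)
                Console.WriteLine($"x = {sample[i]}, p = {p[i]}");
        }
    }
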
diff --git a/Numerics/Distributions/Univariate/GeneralizedBeta.cs b/Numerics/Distributions/Univariate/GeneralizedBeta.cs
index 2c569aa6..24dd2472 100644
--- a/Numerics/Distributions/Univariate/GeneralizedBeta.cs
+++ b/Numerics/Distributions/Univariate/GeneralizedBeta.cs
@@ -399,7 +399,7 @@ public ArgumentOutOfRangeException ValidateParameters(double alpha, double beta,
                     throw new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than or equal to the max.");
                 return new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than or equal to the max.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/GeneralizedExtremeValue.cs b/Numerics/Distributions/Univariate/GeneralizedExtremeValue.cs
index 18dd7c54..3d89127e 100644
--- a/Numerics/Distributions/Univariate/GeneralizedExtremeValue.cs
+++ b/Numerics/Distributions/Univariate/GeneralizedExtremeValue.cs
@@ -420,7 +420,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/GeneralizedLogistic.cs b/Numerics/Distributions/Univariate/GeneralizedLogistic.cs
index 0b4c9b4c..81093b4a 100644
--- a/Numerics/Distributions/Univariate/GeneralizedLogistic.cs
+++ b/Numerics/Distributions/Univariate/GeneralizedLogistic.cs
@@ -412,7 +412,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/GeneralizedNormal.cs b/Numerics/Distributions/Univariate/GeneralizedNormal.cs
index 2ac643d8..b6888930 100644
--- a/Numerics/Distributions/Univariate/GeneralizedNormal.cs
+++ b/Numerics/Distributions/Univariate/GeneralizedNormal.cs
@@ -368,7 +368,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/GeneralizedPareto.cs b/Numerics/Distributions/Univariate/GeneralizedPareto.cs
index 220a3c0a..bcdb1fac 100644
--- a/Numerics/Distributions/Univariate/GeneralizedPareto.cs
+++ b/Numerics/Distributions/Univariate/GeneralizedPareto.cs
@@ -412,7 +412,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Geometric.cs b/Numerics/Distributions/Univariate/Geometric.cs
index 1ee2b39e..7dbe5b60 100644
--- a/Numerics/Distributions/Univariate/Geometric.cs
+++ b/Numerics/Distributions/Univariate/Geometric.cs
@@ -219,7 +219,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(ProbabilityOfSuccess), "Probability must be between 0 and 1.");
                 return new ArgumentOutOfRangeException(nameof(ProbabilityOfSuccess), "Probability must be between 0 and 1.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Gumbel.cs b/Numerics/Distributions/Univariate/Gumbel.cs
index 2c08a36f..97b1b428 100644
--- a/Numerics/Distributions/Univariate/Gumbel.cs
+++ b/Numerics/Distributions/Univariate/Gumbel.cs
@@ -290,7 +290,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/InverseChiSquared.cs b/Numerics/Distributions/Univariate/InverseChiSquared.cs
index 859afef1..433e2b41 100644
--- a/Numerics/Distributions/Univariate/InverseChiSquared.cs
+++ b/Numerics/Distributions/Univariate/InverseChiSquared.cs
@@ -269,7 +269,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "The scale parameter σ (sigma) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "The scale parameter σ (sigma) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/InverseGamma.cs b/Numerics/Distributions/Univariate/InverseGamma.cs
index f2de885c..778b5a65 100644
--- a/Numerics/Distributions/Univariate/InverseGamma.cs
+++ b/Numerics/Distributions/Univariate/InverseGamma.cs
@@ -255,7 +255,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Alpha), "The shape parameter α (alpha) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Alpha), "The shape parameter α (alpha) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/KappaFour.cs b/Numerics/Distributions/Univariate/KappaFour.cs
index a49a1350..390b6306 100644
--- a/Numerics/Distributions/Univariate/KappaFour.cs
+++ b/Numerics/Distributions/Univariate/KappaFour.cs
@@ -405,7 +405,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Hondo), "The shape parameter h (hondo) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Hondo), "The shape parameter h (hondo) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/KernelDensity.cs b/Numerics/Distributions/Univariate/KernelDensity.cs
index b837afab..7cc8c808 100644
--- a/Numerics/Distributions/Univariate/KernelDensity.cs
+++ b/Numerics/Distributions/Univariate/KernelDensity.cs
@@ -152,14 +152,14 @@ public enum KernelType
             Uniform
         }
 
-        private double[] _sampleData;
+        private double[] _sampleData = Array.Empty<double>();
         private double _bandwidth;
         private KernelType _kernelDistribution;
-        private IKernel _kernel;
+        private IKernel _kernel = null!;
         private bool _cdfCreated = false;
-        private OrderedPairedData opd;
+        private OrderedPairedData opd = null!;
         private double u1, u2, u3, u4;
-        private double[] _weights; // one weight per sample (unnormalised)
+        private double[] _weights = null!; // one weight per sample (unnormalised)
         private double _sumW = 1.0; // Σ wᵢ (defaults to 1 for un‑weighted case)
 
@@ -549,7 +549,7 @@ public double BandwidthRule(IList<double> sampleData)
         /// </summary>
         /// <param name="sample">Sample of data, no sorting is assumed.</param>
         /// <param name="w">A list of weights.</param>
-        public double BandwidthRule(IList<double> sample, IList<double> w = null)
+        public double BandwidthRule(IList<double> sample, IList<double> w = null!)
         {
             w ??= Enumerable.Repeat(1.0, sample.Count).ToArray();
             double m = w.Zip(sample, (wi, xi) => wi * xi).Sum() / w.Sum();
@@ -574,7 +574,7 @@ public override void SetParameters(IList<double> parameters)
         /// <inheritdoc/>
         public override ArgumentOutOfRangeException ValidateParameters(IList<double> parameters, bool throwException)
         {
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -588,7 +588,7 @@ private ArgumentOutOfRangeException ValidateParameters(double value, bool throwE
                     throw new ArgumentOutOfRangeException(nameof(Bandwidth), "The bandwidth must be a positive number!");
                 return new ArgumentOutOfRangeException(nameof(Bandwidth), "The bandwidth must be a positive number!");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
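
BandwidthRule's visible body computes a weighted mean; a plausible completion is a weighted Silverman-style rule of thumb, sketched below. The 1.06·s·n^(-1/5) form and the effective-sample-size correction are assumptions for illustration, not the library's exact formula:

    using System;
    using System.Linq;

    // Sketch of a weighted bandwidth rule-of-thumb. Only the weighted mean mirrors
    // the visible patch; the Silverman-style completion is an assumption.
    static class BandwidthSketch
    {
        static double BandwidthRule(double[] sample, double[]? w = null)
        {
            w ??= Enumerable.Repeat(1.0, sample.Length).ToArray();
            double sumW = w.Sum();
            double m = w.Zip(sample, (wi, xi) => wi * xi).Sum() / sumW;                    // weighted mean
            double v = w.Zip(sample, (wi, xi) => wi * (xi - m) * (xi - m)).Sum() / sumW;   // weighted variance
            double nEff = sumW * sumW / w.Sum(wi => wi * wi);                              // effective sample size
            return 1.06 * Math.Sqrt(v) * Math.Pow(nEff, -0.2);
        }

        static void Main() =>
            Console.WriteLine(BandwidthRule(new[] { 1.0, 2.0, 2.5, 3.0, 10.0 }));
    }
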
diff --git a/Numerics/Distributions/Univariate/LnNormal.cs b/Numerics/Distributions/Univariate/LnNormal.cs
index 8956f4d9..9bd42ce4 100644
--- a/Numerics/Distributions/Univariate/LnNormal.cs
+++ b/Numerics/Distributions/Univariate/LnNormal.cs
@@ -311,7 +311,7 @@ public ArgumentOutOfRangeException ValidateParameters(double mean, double standa
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Sigma must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Sigma must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/LogNormal.cs b/Numerics/Distributions/Univariate/LogNormal.cs
index d341a8a4..f2829d76 100644
--- a/Numerics/Distributions/Univariate/LogNormal.cs
+++ b/Numerics/Distributions/Univariate/LogNormal.cs
@@ -365,7 +365,7 @@ public ArgumentOutOfRangeException ValidateParameters(double mu, double sigma, b
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Sigma must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Sigma must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/LogPearsonTypeIII.cs b/Numerics/Distributions/Univariate/LogPearsonTypeIII.cs
index afea3dc3..bd2ffdd1 100644
--- a/Numerics/Distributions/Univariate/LogPearsonTypeIII.cs
+++ b/Numerics/Distributions/Univariate/LogPearsonTypeIII.cs
@@ -529,7 +529,7 @@ public ArgumentOutOfRangeException ValidateParameters(double mu, double sigma, d
                     throw new ArgumentOutOfRangeException(nameof(Gamma), "Gamma = " + gamma + ". Gamma must be greater than -5.");
                 return new ArgumentOutOfRangeException(nameof(Gamma), "Gamma = " + gamma + ". Gamma must be greater than -5.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Logistic.cs b/Numerics/Distributions/Univariate/Logistic.cs
index a06db19d..9b934d01 100644
--- a/Numerics/Distributions/Univariate/Logistic.cs
+++ b/Numerics/Distributions/Univariate/Logistic.cs
@@ -281,7 +281,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Alpha), "The scale parameter α (alpha) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Mixture.cs b/Numerics/Distributions/Univariate/Mixture.cs
index 5472f70b..a782d856 100644
--- a/Numerics/Distributions/Univariate/Mixture.cs
+++ b/Numerics/Distributions/Univariate/Mixture.cs
@@ -79,9 +79,9 @@ public Mixture(double[] weights, IUnivariateDistribution[] distributions)
             SetParameters(weights, distributions);
         }
 
-        private double[] _weights;
-        private UnivariateDistributionBase[] _distributions;
-        private EmpiricalDistribution _empiricalCDF;
+        private double[] _weights = Array.Empty<double>();
+        private UnivariateDistributionBase[] _distributions = null!;
+        private EmpiricalDistribution _empiricalCDF = null!;
         private bool _momentsComputed = false;
         private double u1, u2, u3, u4;
         private bool _empiricalCDFCreated = false;
@@ -618,7 +618,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     return new ArgumentOutOfRangeException(nameof(Distributions), "Distribution " + (i + 1).ToString() + " has invalid parameters.");
                 }
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -1086,27 +1086,30 @@ public override XElement ToXElement()
         public static Mixture FromXElement(XElement xElement)
         {
             UnivariateDistributionType type = UnivariateDistributionType.Deterministic;
-            if (xElement.Attribute(nameof(UnivariateDistributionBase.Type)) != null)
+            var univBaseAttr = xElement.Attribute(nameof(UnivariateDistributionBase.Type));
+            if (univBaseAttr != null)
             {
-                Enum.TryParse(xElement.Attribute(nameof(UnivariateDistributionBase.Type)).Value, out type);
+                Enum.TryParse(univBaseAttr.Value, out type);
             }
             if (type == UnivariateDistributionType.Mixture)
             {
                 var weights = new List<double>();
                 var distributions = new List<UnivariateDistributionBase>();
-                if (xElement.Attribute(nameof(Weights)) != null)
+                var weightAttr = xElement.Attribute(nameof(Weights));
+                if (weightAttr != null)
                 {
-                    var w = xElement.Attribute(nameof(Weights)).Value.Split('|');
+                    var w = weightAttr.Value.Split('|');
                     for (int i = 0; i < w.Length; i++)
                     {
                         double.TryParse(w[i], NumberStyles.Any, CultureInfo.InvariantCulture, out var weight);
                         weights.Add(weight);
                     }
                 }
-                if (xElement.Attribute(nameof(Distributions)) != null)
+                var distAttr = xElement.Attribute(nameof(Distributions));
+                if (distAttr != null)
                 {
-                    var types = xElement.Attribute(nameof(Distributions)).Value.Split('|');
+                    var types = distAttr.Value.Split('|');
                     for (int i = 0; i < types.Length; i++)
                     {
                         Enum.TryParse(types[i], out UnivariateDistributionType distType);
@@ -1114,30 +1117,34 @@ public static Mixture FromXElement(XElement xElement)
                     }
                 }
                 var mixture = new Mixture(weights.ToArray(), distributions.ToArray());
-
-                if (xElement.Attribute(nameof(IsZeroInflated)) != null)
+                var zeroInflatedAttr = xElement.Attribute(nameof(IsZeroInflated));
+                if (zeroInflatedAttr != null)
                 {
-                    bool.TryParse(xElement.Attribute(nameof(IsZeroInflated)).Value, out var isZeroInflated);
+                    bool.TryParse(zeroInflatedAttr.Value, out var isZeroInflated);
                     mixture.IsZeroInflated = isZeroInflated;
                 }
-                if (xElement.Attribute(nameof(ZeroWeight)) != null)
+                var zeroWeightAttr = xElement.Attribute(nameof(ZeroWeight));
+                if (zeroWeightAttr != null)
                 {
-                    double.TryParse(xElement.Attribute(nameof(ZeroWeight)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var zeroWeight);
+                    double.TryParse(zeroWeightAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var zeroWeight);
                     mixture.ZeroWeight = zeroWeight;
                 }
-                if (xElement.Attribute(nameof(XTransform)) != null)
+                var xTransformAttr = xElement.Attribute(nameof(XTransform));
+                if (xTransformAttr != null)
                 {
-                    Enum.TryParse(xElement.Attribute(nameof(XTransform)).Value, out Transform xTransform);
+                    Enum.TryParse(xTransformAttr.Value, out Transform xTransform);
                     mixture.XTransform = xTransform;
                 }
-                if (xElement.Attribute(nameof(ProbabilityTransform)) != null)
+                var xProbabilityTransformAttr = xElement.Attribute(nameof(ProbabilityTransform));
+                if (xProbabilityTransformAttr != null)
                 {
-                    Enum.TryParse(xElement.Attribute(nameof(ProbabilityTransform)).Value, out Transform probabilityTransform);
+                    Enum.TryParse(xProbabilityTransformAttr.Value, out Transform probabilityTransform);
                     mixture.ProbabilityTransform = probabilityTransform;
                 }
-                if (xElement.Attribute("Parameters") != null)
+                var xParametersAttr = xElement.Attribute("Parameters");
+                if (xParametersAttr != null)
                 {
-                    var vals = xElement.Attribute("Parameters").Value.Split('|');
+                    var vals = xParametersAttr.Value.Split('|');
                     var parameters = new List<double>();
                     for (int i = 0; i < vals.Length; i++)
                     {
@@ -1151,7 +1158,7 @@ public static Mixture FromXElement(XElement xElement)
             }
             else
             {
-                return null;
+                return null!;
             }
         }
diff --git a/Numerics/Distributions/Univariate/NoncentralT.cs b/Numerics/Distributions/Univariate/NoncentralT.cs
index e8b50077..e2b473fa 100644
--- a/Numerics/Distributions/Univariate/NoncentralT.cs
+++ b/Numerics/Distributions/Univariate/NoncentralT.cs
@@ -289,7 +289,7 @@ public ArgumentOutOfRangeException ValidateParameters(double v, double mu, bool
                     throw new ArgumentOutOfRangeException(nameof(Noncentrality), "The noncentrality parameter μ (mu) must be a number.");
                 return new ArgumentOutOfRangeException(nameof(Noncentrality), "The noncentrality parameter μ (mu) must be a number.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Normal.cs b/Numerics/Distributions/Univariate/Normal.cs
index f62911c2..5a993176 100644
--- a/Numerics/Distributions/Univariate/Normal.cs
+++ b/Numerics/Distributions/Univariate/Normal.cs
@@ -349,7 +349,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Standard deviation must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Standard deviation must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Pareto.cs b/Numerics/Distributions/Univariate/Pareto.cs
index 5caed988..e1e0a85e 100644
--- a/Numerics/Distributions/Univariate/Pareto.cs
+++ b/Numerics/Distributions/Univariate/Pareto.cs
@@ -261,7 +261,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Alpha), "The shape parameter α (alpha) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Alpha), "The shape parameter α (alpha) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/PearsonTypeIII.cs b/Numerics/Distributions/Univariate/PearsonTypeIII.cs
index 598bb216..74a67c67 100644
--- a/Numerics/Distributions/Univariate/PearsonTypeIII.cs
+++ b/Numerics/Distributions/Univariate/PearsonTypeIII.cs
@@ -406,7 +406,7 @@ public ArgumentOutOfRangeException ValidateParameters(double mu, double sigma, d
                     throw new ArgumentOutOfRangeException(nameof(Gamma), "Gamma = " + gamma + ". Gamma must be greater than -5.");
                 return new ArgumentOutOfRangeException(nameof(Gamma), "Gamma = " + gamma + ". Gamma must be greater than -5.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -814,7 +814,7 @@ public override double[] ConditionalMoments(double a, double b)
         private (bool stable, bool divisable) TryPearsonConditionalMoments(
             double a, double b, out double[] moments, double pMin)
         {
-            moments = null;
+            moments = null!;
 
             // ---- small math helpers -------------------------------------------------
             static double Phi(double x) => 0.5 * (1.0 + Mathematics.SpecialFunctions.Erf.Function(x / Math.Sqrt(2.0)));
diff --git a/Numerics/Distributions/Univariate/Pert.cs b/Numerics/Distributions/Univariate/Pert.cs
index e7c0e81c..62235836 100644
--- a/Numerics/Distributions/Univariate/Pert.cs
+++ b/Numerics/Distributions/Univariate/Pert.cs
@@ -340,7 +340,7 @@ private ArgumentOutOfRangeException ValidateParameters(double min, double mode,
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(MostLikely), "The mode (most likely) must be between the min and max.");
                 return new ArgumentOutOfRangeException(nameof(MostLikely), "The mode (most likely) must be between the min and max.");
             }
-            return null;
+            return null!;
        }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/PertPercentile.cs b/Numerics/Distributions/Univariate/PertPercentile.cs
index 24b4e4f4..08d2c83a 100644
--- a/Numerics/Distributions/Univariate/PertPercentile.cs
+++ b/Numerics/Distributions/Univariate/PertPercentile.cs
@@ -326,7 +326,7 @@ private ArgumentOutOfRangeException ValidateParameters(double fifth, double fift
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Percentile50th), "The 50% must be between the 5% and 95%.");
                 return new ArgumentOutOfRangeException(nameof(Percentile50th), "The 50% must be between the 5% and 95%.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -507,17 +507,19 @@ public override XElement ToXElement()
         public static PertPercentile FromXElement(XElement xElement)
         {
             UnivariateDistributionType type = UnivariateDistributionType.Deterministic;
-            if (xElement.Attribute(nameof(UnivariateDistributionBase.Type)) != null)
+            var xUnivAttr = xElement.Attribute(nameof(UnivariateDistributionBase.Type));
+            if (xUnivAttr != null)
             {
-                Enum.TryParse(xElement.Attribute(nameof(UnivariateDistributionBase.Type)).Value, out type);
+                Enum.TryParse(xUnivAttr.Value, out type);
             }
             if (type == UnivariateDistributionType.PertPercentile)
             {
                 bool parametersSolved = false;
-                if (xElement.Attribute("ParametersSolved") != null)
+                var xParamSolvedAttr = xElement.Attribute("ParametersSolved");
+                if (xParamSolvedAttr != null)
                 {
-                    bool.TryParse(xElement.Attribute("ParametersSolved").Value, out parametersSolved);
+                    bool.TryParse(xParamSolvedAttr.Value, out parametersSolved);
                 }
                 else
                 {
@@ -528,9 +530,10 @@ public static PertPercentile FromXElement(XElement xElement)
                 var vals = new double[dist.NumberOfParameters];
                 for (int i = 0; i < dist.NumberOfParameters; i++)
                 {
-                    if (xElement.Attribute(names[i]) != null)
+                    var xAttr = xElement.Attribute(names[i]);
+                    if (xAttr != null)
                     {
-                        double.TryParse(xElement.Attribute(names[i]).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out vals[i]);
+                        double.TryParse(xAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out vals[i]);
                     }
                 }
                 dist.SetParameters(vals);
@@ -539,9 +542,10 @@ public static PertPercentile FromXElement(XElement xElement)
                 }
 
                 var beta = new GeneralizedBeta();
-                if (xElement.Attribute("BetaParameters") != null)
+                var xBetaAttr = xElement.Attribute("BetaParameters");
+                if (xBetaAttr != null)
                 {
-                    var vals = xElement.Attribute("BetaParameters").Value.Split('|');
+                    var vals = xBetaAttr.Value.Split('|');
                     var parameters = new List<double>();
                     for (int i = 0; i < vals.Length; i++)
                     {
@@ -551,9 +555,10 @@ public static PertPercentile FromXElement(XElement xElement)
                     beta.SetParameters(parameters);
                 }
                 double _5th = 0, _50th = 0, _95th = 0;
-                if (xElement.Attribute("Parameters") != null)
+                var xParamAttr = xElement.Attribute("Parameters");
+                if (xParamAttr != null)
                 {
-                    var vals = xElement.Attribute("Parameters").Value.Split('|');
+                    var vals = xParamAttr.Value.Split('|');
                     double.TryParse(vals[0], NumberStyles.Any, CultureInfo.InvariantCulture, out _5th);
                     double.TryParse(vals[1], NumberStyles.Any, CultureInfo.InvariantCulture, out _50th);
                     double.TryParse(vals[2], NumberStyles.Any, CultureInfo.InvariantCulture, out _95th);
@@ -571,7 +576,7 @@ public static PertPercentile FromXElement(XElement xElement)
             }
             else
             {
-                return null;
+                return null!;
             }
         }
diff --git a/Numerics/Distributions/Univariate/PertPercentileZ.cs b/Numerics/Distributions/Univariate/PertPercentileZ.cs
index 554c3aae..b32bd60f 100644
--- a/Numerics/Distributions/Univariate/PertPercentileZ.cs
+++ b/Numerics/Distributions/Univariate/PertPercentileZ.cs
@@ -330,7 +330,7 @@ private ArgumentOutOfRangeException ValidateParameters(double fifth, double fift
                 if (throwException)
                     throw new ArgumentOutOfRangeException(nameof(Percentile95th), "The percentiles must be between 0 and 1.");
                 return new ArgumentOutOfRangeException(nameof(Percentile95th), "The percentiles must be between 0 and 1.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -504,17 +504,19 @@ public override XElement ToXElement()
         public static PertPercentileZ FromXElement(XElement xElement)
         {
             UnivariateDistributionType type = UnivariateDistributionType.Deterministic;
-            if (xElement.Attribute(nameof(UnivariateDistributionBase.Type)) != null)
+            var xUnivAttr = xElement.Attribute(nameof(UnivariateDistributionBase.Type));
+            if (xUnivAttr != null)
             {
-                Enum.TryParse(xElement.Attribute(nameof(UnivariateDistributionBase.Type)).Value, out type);
+                Enum.TryParse(xUnivAttr.Value, out type);
             }
             if (type == UnivariateDistributionType.PertPercentileZ)
             {
                 bool parametersSolved = false;
-                if (xElement.Attribute("ParametersSolved") != null)
+                var xParamSolvedAttr = xElement.Attribute("ParametersSolved");
+                if (xParamSolvedAttr != null)
                 {
-                    bool.TryParse(xElement.Attribute("ParametersSolved").Value, out parametersSolved);
+                    bool.TryParse(xParamSolvedAttr.Value, out parametersSolved);
                 }
                 else
                 {
@@ -525,9 +527,10 @@ public static PertPercentileZ FromXElement(XElement xElement)
                 var vals = new double[dist.NumberOfParameters];
                 for (int i = 0; i < dist.NumberOfParameters; i++)
                 {
-                    if (xElement.Attribute(names[i]) != null)
+                    var xAttr = xElement.Attribute(names[i]);
+                    if (xAttr != null)
                    {
-                        double.TryParse(xElement.Attribute(names[i]).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out vals[i]);
+                        double.TryParse(xAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out vals[i]);
                     }
                 }
                 dist.SetParameters(vals);
@@ -536,9 +539,10 @@ public static PertPercentileZ FromXElement(XElement xElement)
                 }
 
                 var beta = new GeneralizedBeta();
-                if (xElement.Attribute("BetaParameters") != null)
+                var xBetaAttr = xElement.Attribute("BetaParameters");
+                if (xBetaAttr != null)
                 {
-                    var vals = xElement.Attribute("BetaParameters").Value.Split('|');
+                    var vals = xBetaAttr.Value.Split('|');
                     var parameters = new List<double>();
                     for (int i = 0; i < vals.Length; i++)
                     {
@@ -548,9 +552,10 @@ public static PertPercentileZ FromXElement(XElement xElement)
                     beta.SetParameters(parameters);
                 }
                 double _5th = 0, _50th = 0, _95th = 0;
-                if (xElement.Attribute("Parameters") != null)
+                var xParamAttr = xElement.Attribute("Parameters");
+                if (xParamAttr != null)
                 {
-                    var vals = xElement.Attribute("Parameters").Value.Split('|');
+                    var vals = xParamAttr.Value.Split('|');
                     double.TryParse(vals[0], NumberStyles.Any, CultureInfo.InvariantCulture, out _5th);
                     double.TryParse(vals[1], NumberStyles.Any, CultureInfo.InvariantCulture, out _50th);
                     double.TryParse(vals[2], NumberStyles.Any, CultureInfo.InvariantCulture, out _95th);
@@ -569,7 +574,7 @@ public static PertPercentileZ FromXElement(XElement xElement)
             }
             else
             {
-                return null;
+                return null!;
             }
         }
diff --git a/Numerics/Distributions/Univariate/Poisson.cs b/Numerics/Distributions/Univariate/Poisson.cs
index 6295ddce..9e8d1821 100644
--- a/Numerics/Distributions/Univariate/Poisson.cs
+++ b/Numerics/Distributions/Univariate/Poisson.cs
@@ -217,7 +217,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Lambda), "The rate (λ) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Lambda), "The rate (λ) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Rayleigh.cs b/Numerics/Distributions/Univariate/Rayleigh.cs
index c5bca12c..8c74e25e 100644
--- a/Numerics/Distributions/Univariate/Rayleigh.cs
+++ b/Numerics/Distributions/Univariate/Rayleigh.cs
@@ -267,7 +267,7 @@ public ArgumentOutOfRangeException ValidateParameters(double scale, bool throwEx
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Standard deviation must be greater than zero.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Standard deviation must be greater than zero.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/StudentT.cs b/Numerics/Distributions/Univariate/StudentT.cs
index a6f821cc..599babb5 100644
--- a/Numerics/Distributions/Univariate/StudentT.cs
+++ b/Numerics/Distributions/Univariate/StudentT.cs
@@ -361,7 +361,7 @@ public ArgumentOutOfRangeException ValidateParameters(double location, double sc
                     throw new ArgumentOutOfRangeException(nameof(DegreesOfFreedom), "The degrees of freedom ν (nu) must greater than or equal to one.");
                 return new ArgumentOutOfRangeException(nameof(DegreesOfFreedom), "The degrees of freedom ν (nu) must greater than or equal to one.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Triangular.cs b/Numerics/Distributions/Univariate/Triangular.cs
index 3a65a94f..e16e098b 100644
--- a/Numerics/Distributions/Univariate/Triangular.cs
+++ b/Numerics/Distributions/Univariate/Triangular.cs
@@ -334,7 +334,7 @@ private ArgumentOutOfRangeException ValidateParameters(double min, double mode,
                     throw new ArgumentOutOfRangeException(nameof(MostLikely), "The mode (most likely) must be between the min and max.");
                 return new ArgumentOutOfRangeException(nameof(MostLikely), "The mode (most likely) must be between the min and max.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/TruncatedDistribution.cs b/Numerics/Distributions/Univariate/TruncatedDistribution.cs
index 04275f56..6fb26359 100644
--- a/Numerics/Distributions/Univariate/TruncatedDistribution.cs
+++ b/Numerics/Distributions/Univariate/TruncatedDistribution.cs
@@ -273,7 +273,7 @@ public override void SetParameters(IList<double> parameters)
         /// <inheritdoc/>
         public override ArgumentOutOfRangeException ValidateParameters(IList<double> parameters, bool throwException)
         {
-            if (_baseDist != null) _baseDist.ValidateParameters(parameters.ToArray().Subset(0, parameters.Count - 2), throwException);
+            if (_baseDist != null!) _baseDist.ValidateParameters(parameters.ToArray().Subset(0, parameters.Count - 2), throwException);
             if (double.IsNaN(Min) || double.IsNaN(Max) || double.IsInfinity(Min) || double.IsInfinity(Max) || Min >= Max)
             {
                 if (throwException)
@@ -286,7 +286,7 @@ public override ArgumentOutOfRangeException ValidateParameters(IList<double> par
                     throw new ArgumentOutOfRangeException(nameof(Min), "Truncation interval has zero probability mass.");
                 return new ArgumentOutOfRangeException(nameof(Min), "Truncation interval has zero probability mass.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/TruncatedNormal.cs b/Numerics/Distributions/Univariate/TruncatedNormal.cs
index f1850f01..465352bd 100644
--- a/Numerics/Distributions/Univariate/TruncatedNormal.cs
+++ b/Numerics/Distributions/Univariate/TruncatedNormal.cs
@@ -386,7 +386,7 @@ public ArgumentOutOfRangeException ValidateParameters(double mean, double standa
                     throw new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
                 return new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Uncertainty Analysis/BootstrapAnalysis.cs b/Numerics/Distributions/Univariate/Uncertainty Analysis/BootstrapAnalysis.cs
index 93940d00..28c2b6b5 100644
--- a/Numerics/Distributions/Univariate/Uncertainty Analysis/BootstrapAnalysis.cs
+++ b/Numerics/Distributions/Univariate/Uncertainty Analysis/BootstrapAnalysis.cs
@@ -141,7 +141,7 @@ public IUnivariateDistribution[] Distributions()
                 // MLE and certain L-moments methods can fail to find a solution
                 // On fail, set to null
-                if (failed == true) bootDistributions[idx] = null;
+                if (failed == true) bootDistributions[idx] = null!;
             });
 
             return bootDistributions;
@@ -171,7 +171,7 @@ public IUnivariateDistribution[] Distributions(ParameterSet[] parameterSets)
                 };
 
                 // On fail, set to null
-                if (failed == true) bootDistributions[idx] = null;
+                if (failed == true) bootDistributions[idx] = null!;
             });
 
             return bootDistributions;
@@ -181,7 +181,7 @@ public IUnivariateDistribution[] Distributions(ParameterSet[] parameterSets)
         /// <summary>
         /// Bootstrap an array of distribution parameters.
         /// </summary>
-        public double[,] Parameters(IUnivariateDistribution[] distributions = null)
+        public double[,] Parameters(IUnivariateDistribution[] distributions = null!)
         {
             var bootDistributions = distributions != null ? distributions : Distributions();
             var bootParameters = new double[bootDistributions.Count(), Distribution.NumberOfParameters];
@@ -206,7 +206,7 @@ public IUnivariateDistribution[] Distributions(ParameterSet[] parameterSets)
         /// <summary>
         /// Bootstrap an array of distribution parameter sets.
         /// </summary>
-        public ParameterSet[] ParameterSets(IUnivariateDistribution[] distributions = null)
+        public ParameterSet[] ParameterSets(IUnivariateDistribution[] distributions = null!)
         {
             var bootDistributions = distributions != null ? distributions : Distributions();
             var bootParameters = new ParameterSet[bootDistributions.Count()];
@@ -293,7 +293,7 @@ public ParameterSet[] ParameterSets(IUnivariateDistribution[] distributions = nu
         /// <param name="alpha">The confidence level; Default = 0.1, which will result in the 90% confidence intervals.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
         /// <param name="recordParameterSets">Optional. Determines whether to record parameter sets. Default = true.</param>
-        public UncertaintyAnalysisResults Estimate(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null, bool recordParameterSets = true)
+        public UncertaintyAnalysisResults Estimate(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null!, bool recordParameterSets = true)
         {
             var results = new UncertaintyAnalysisResults();
             results.ParentDistribution = (UnivariateDistributionBase)Distribution;
@@ -344,7 +344,7 @@ public UncertaintyAnalysisResults Estimate(IList<double> probabilities, double a
         /// <param name="quantiles">List quantile values.</param>
         /// <param name="probabilities">List of non-exceedance probabilities.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
-        public double[] ExpectedProbabilities(IList<double> quantiles, IList<double> probabilities, IUnivariateDistribution[] distributions = null)
+        public double[] ExpectedProbabilities(IList<double> quantiles, IList<double> probabilities, IUnivariateDistribution[] distributions = null!)
         {
             var quants = quantiles.ToArray();
             var probs = probabilities.ToArray();
@@ -396,7 +396,7 @@ public double[] ExpectedProbabilities(IList<double> quantiles, IList<double> pro
         /// </summary>
         /// <param name="quantiles">List quantile values.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
-        public double[] ExpectedProbabilities(IList<double> quantiles, IUnivariateDistribution[] distributions = null)
+        public double[] ExpectedProbabilities(IList<double> quantiles, IUnivariateDistribution[] distributions = null!)
         {
             var quants = quantiles.ToArray();
             Array.Sort(quants);
@@ -447,7 +447,7 @@ public double[] ComputeMinMaxQuantiles(double minProbability, double maxProbabil
         /// <param name="probabilities">List of non-exceedance probabilities.</param>
         /// <param name="alpha">The confidence level; Default = 0.1, which will result in the 90% confidence intervals.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
-        public double[,] PercentileQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null)
+        public double[,] PercentileQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null!)
         {
             var CIs = new double[] { alpha / 2d, 1d - alpha / 2d };
             var Output = new double[probabilities.Count, 2];
@@ -474,7 +474,7 @@ public double[] ComputeMinMaxQuantiles(double minProbability, double maxProbabil
         /// <param name="probabilities">List of non-exceedance probabilities.</param>
         /// <param name="alpha">The confidence level; Default = 0.1, which will result in the 90% confidence intervals.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
-        public double[,] BiasCorrectedQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null)
+        public double[,] BiasCorrectedQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null!)
         {
             // Create list of original X values given probability values
             var populationXValues = new double[probabilities.Count];
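
PercentileQuantileCI builds its intervals from the alpha/2 and 1-alpha/2 bootstrap order statistics. A standalone sketch of that percentile-bootstrap idea (nearest-rank version; the library may interpolate):

    using System;
    using System.Linq;

    // Sketch of the percentile bootstrap CI: sort the bootstrap replicates of a
    // quantile and read off the alpha/2 and 1-alpha/2 order statistics.
    static class PercentileCISketch
    {
        static (double lower, double upper) PercentileCI(double[] bootReplicates, double alpha = 0.1)
        {
            var sorted = bootReplicates.OrderBy(v => v).ToArray();
            int n = sorted.Length;
            int lo = (int)Math.Clamp(Math.Ceiling(alpha / 2 * n) - 1, 0, n - 1);
            int hi = (int)Math.Clamp(Math.Ceiling((1 - alpha / 2) * n) - 1, 0, n - 1);
            return (sorted[lo], sorted[hi]);
        }

        static void Main()
        {
            var rnd = new Random(12345);
            var reps = Enumerable.Range(0, 1000).Select(_ => rnd.NextDouble() * 10).ToArray();
            var (l, u) = PercentileCI(reps);
            Console.WriteLine($"90% CI: [{l:F2}, {u:F2}]");
        }
    }
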
@@ -520,7 +520,7 @@ public double[] ComputeMinMaxQuantiles(double minProbability, double maxProbabil
         /// <param name="probabilities">List of non-exceedance probabilities.</param>
         /// <param name="alpha">The confidence level; Default = 0.1, which will result in the 90% confidence intervals.</param>
         /// <param name="distributions">Optional. Pass in an array of bootstrapped distributions. Default = null.</param>
-        public double[,] NormalQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null)
+        public double[,] NormalQuantileCI(IList<double> probabilities, double alpha = 0.1, IUnivariateDistribution[] distributions = null!)
         {
             // Create list of original X values given probability values
@@ -713,7 +713,7 @@ private double[] AccelerationConstants(IList<double> sampleData, IList<double> p
             {
                 // MLE and certain L-moments methods can fail to find a solution
                 // On fail, set to null
-                bootDistributions[i] = null;
+                bootDistributions[i] = null!;
                 for (int j = 0; j < probabilities.Count; j++)
                 {
                     xValues[i, j] = double.NaN;
diff --git a/Numerics/Distributions/Univariate/Uncertainty Analysis/UncertaintyAnalysisResults.cs b/Numerics/Distributions/Univariate/Uncertainty Analysis/UncertaintyAnalysisResults.cs
index c463c299..29ff0a46 100644
--- a/Numerics/Distributions/Univariate/Uncertainty Analysis/UncertaintyAnalysisResults.cs
+++ b/Numerics/Distributions/Univariate/Uncertainty Analysis/UncertaintyAnalysisResults.cs
@@ -81,7 +81,7 @@ public UncertaintyAnalysisResults(UnivariateDistributionBase parentDistribution,
             double maxProbability = 1 - 1e-9,
             bool recordParameterSets = false)
         {
-            if (parentDistribution == null)
+            if (parentDistribution == null!)
                 throw new ArgumentNullException(nameof(parentDistribution));
             if (sampledDistributions == null || sampledDistributions.Length == 0)
                 throw new ArgumentException("Sampled distributions cannot be null or empty.", nameof(sampledDistributions));
@@ -106,27 +106,27 @@ public UncertaintyAnalysisResults(UnivariateDistributionBase parentDistribution,
         /// <summary>
         /// The parent probability distribution.
         /// </summary>
-        public UnivariateDistributionBase ParentDistribution { get; set; }
+        public UnivariateDistributionBase ParentDistribution { get; set; } = null!;
 
         /// <summary>
         /// The array of parameter sets.
         /// </summary>
-        public ParameterSet[] ParameterSets { get; set; }
+        public ParameterSet[] ParameterSets { get; set; } = null!;
 
         /// <summary>
         /// The confidence intervals.
         /// </summary>
-        public double[,] ConfidenceIntervals { get; set; }
+        public double[,] ConfidenceIntervals { get; set; } = null!;
 
         /// <summary>
         /// The mode (or computed) curve from the parent distribution.
         /// </summary>
-        public double[] ModeCurve { get; set; }
+        public double[] ModeCurve { get; set; } = null!;
 
         /// <summary>
         /// The mean (or predictive) curve.
         /// </summary>
-        public double[] MeanCurve { get; set; }
+        public double[] MeanCurve { get; set; } = null!;
 
         /// <summary>
         /// Gets or sets the Akaike information criteria (AIC) of the fit.
@@ -189,7 +189,8 @@ public static UncertaintyAnalysisResults FromByteArray(byte[] bytes)
                 options.Converters.Add(new Double2DArrayConverter());
                 options.Converters.Add(new String2DArrayConverter());
                 options.Converters.Add(new UnivariateDistributionConverter());
-                return JsonSerializer.Deserialize(bytes, options);
+                var result = JsonSerializer.Deserialize<UncertaintyAnalysisResults>(bytes, options);
+                return result ?? FromByteArrayLegacy(bytes);
             }
             catch (Exception)
             {
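
The FromByteArray change above introduces a deserialize-then-fallback pattern: if the current JSON layout yields no result, decoding falls back to the legacy reader. A minimal sketch of the pattern with stand-in types (Results and FromLegacy are hypothetical, not the library's API):

    using System;
    using System.Text.Json;

    record Results(double AIC, double BIC);

    static class FallbackSketch
    {
        // Placeholder for the old binary format decoder.
        static Results? FromLegacy(byte[] bytes) => null;

        static Results? FromByteArray(byte[] bytes)
        {
            try
            {
                var result = JsonSerializer.Deserialize<Results>(bytes);
                return result ?? FromLegacy(bytes);   // fall back when JSON yields nothing
            }
            catch (JsonException)
            {
                return FromLegacy(bytes);             // fall back when JSON is unreadable
            }
        }
    }
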
@@ -226,7 +227,7 @@ private static UncertaintyAnalysisResults FromByteArrayLegacy(byte[] bytes)
                 // If there is an error, just catch it and force the user to rerun the
                 // uncertainty analysis.
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
@@ -235,7 +236,7 @@ private static UncertaintyAnalysisResults FromByteArrayLegacy(byte[] bytes)
         public XElement ToXElement()
         {
             var result = new XElement(nameof(UncertaintyAnalysisResults));
-            if (ParentDistribution != null) result.Add(ParentDistribution.ToXElement());
+            if (ParentDistribution != null!) result.Add(ParentDistribution.ToXElement());
             result.SetAttributeValue(nameof(AIC), AIC.ToString("G17", CultureInfo.InvariantCulture));
             result.SetAttributeValue(nameof(BIC), BIC.ToString("G17", CultureInfo.InvariantCulture));
             result.SetAttributeValue(nameof(DIC), DIC.ToString("G17", CultureInfo.InvariantCulture));
@@ -268,44 +269,62 @@ public static UncertaintyAnalysisResults FromXElement(XElement xElement)
         {
             var ua = new UncertaintyAnalysisResults();
             // Parent distribution
-            if (xElement.Element("Distribution") != null)
-                ua.ParentDistribution = UnivariateDistributionFactory.CreateDistribution(xElement.Element("Distribution"));
+            var distElement = xElement.Element("Distribution");
+            if (distElement != null)
+            {
+                var parentDist = UnivariateDistributionFactory.CreateDistribution(distElement);
+                if (parentDist is not null)
+                {
+                    ua.ParentDistribution = parentDist;
+                }
+                else
+                {
+                    throw new InvalidDataException("Unable to deserialize parent distribution from XElement.");
+                }
+            }
+
             // AIC
-            if (xElement.Attribute(nameof(AIC)) != null)
+            var aicElement = xElement.Attribute(nameof(AIC));
+            if (aicElement != null)
             {
-                double.TryParse(xElement.Attribute(nameof(AIC)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var aic);
+                double.TryParse(aicElement.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var aic);
                 ua.AIC = aic;
             }
             // BIC
-            if (xElement.Attribute(nameof(BIC)) != null)
+            var bicElement = xElement.Attribute(nameof(BIC));
+            if (bicElement != null)
             {
-                double.TryParse(xElement.Attribute(nameof(BIC)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var bic);
+                double.TryParse(bicElement.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var bic);
                 ua.BIC = bic;
             }
             // DIC
-            if (xElement.Attribute(nameof(DIC)) != null)
+            var dicElement = xElement.Attribute(nameof(DIC));
+            if (dicElement != null)
             {
-                double.TryParse(xElement.Attribute(nameof(DIC)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var dic);
+                double.TryParse(dicElement.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var dic);
                 ua.DIC = dic;
             }
             // RMSE
-            if (xElement.Attribute(nameof(RMSE)) != null)
+            var rmseElement = xElement.Attribute(nameof(RMSE));
+            if (rmseElement != null)
             {
-                double.TryParse(xElement.Attribute(nameof(RMSE)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var rmse);
+                double.TryParse(rmseElement.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var rmse);
                 ua.RMSE = rmse;
             }
             // ERL
-            if (xElement.Attribute(nameof(ERL)) != null)
+            var erlElement = xElement.Attribute(nameof(ERL));
+            if (erlElement != null)
             {
-                double.TryParse(xElement.Attribute(nameof(ERL)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var erl);
+                double.TryParse(erlElement.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var erl);
                 ua.ERL = erl;
             }
             // Mode Curve
-            if (xElement.Attribute(nameof(ua.ModeCurve)) != null)
+            var modeAttr = xElement.Attribute(nameof(ua.ModeCurve));
+            if (modeAttr != null)
             {
-                var vals = xElement.Attribute(nameof(ua.ModeCurve)).Value.Split('|');
+                var vals = modeAttr.Value.Split('|');
                 if (vals.Length > 0)
                 {
                     ua.ModeCurve = new double[vals.Length];
@@ -316,9 +335,10 @@ public static UncertaintyAnalysisResults FromXElement(XElement xElement)
                 }
             }
             // Mean Curve
-            if (xElement.Attribute(nameof(ua.MeanCurve)) != null)
+            var meanAttr = xElement.Attribute(nameof(ua.MeanCurve));
+            if (meanAttr != null)
             {
-                var vals = xElement.Attribute(nameof(ua.MeanCurve)).Value.Split('|');
+                var vals = meanAttr.Value.Split('|');
                 if (vals.Length > 0)
                 {
                     ua.MeanCurve = new double[vals.Length];
@@ -329,9 +349,10 @@ public static UncertaintyAnalysisResults FromXElement(XElement xElement)
                 }
             }
             // Confidence Intervals
-            if (xElement.Attribute(nameof(ua.ConfidenceIntervals)) != null)
+            var ciAttr = xElement.Attribute(nameof(ua.ConfidenceIntervals));
+            if (ciAttr != null)
             {
-                var vals = xElement.Attribute(nameof(ua.ConfidenceIntervals)).Value.Split('|');
+                var vals = ciAttr.Value.Split('|');
                 if (vals.Length > 0)
                 {
                     ua.ConfidenceIntervals = new double[vals.Length, vals[0].Split(',').Length];
@@ -355,7 +376,7 @@ public static UncertaintyAnalysisResults FromXElement(XElement xElement)
         /// <param name="probabilities">Array of non-exceedance probabilities.</param>
         public void ProcessModeCurve(UnivariateDistributionBase parentDistribution, double[] probabilities)
         {
-            if (parentDistribution == null)
+            if (parentDistribution == null!)
                 throw new ArgumentNullException(nameof(parentDistribution));
             if (probabilities == null || probabilities.Length == 0)
                 throw new ArgumentException("Probabilities cannot be null or empty.", nameof(probabilities));
@@ -442,7 +463,7 @@ public void ProcessMeanCurve(UnivariateDistributionBase[] sampledDistributions,
             Parallel.For(0, B, j =>
             {
-                if (sampledDistributions[j] != null)
+                if (sampledDistributions[j] != null!)
                 {
                     var innerMin = sampledDistributions[j].InverseCDF(minProbability);
                     var innerMax = sampledDistributions[j].InverseCDF(maxProbability);
@@ -478,7 +499,7 @@ public void ProcessMeanCurve(UnivariateDistributionBase[] sampledDistributions,
                 double total = 0d;
                 Parallel.For(0, B, () => 0d, (j, loop, sum) =>
                 {
-                    if (sampledDistributions[j] != null)
+                    if (sampledDistributions[j] != null!)
                     {
                         sum += sampledDistributions[j].CDF(quantiles[i]);
                     }
@@ -530,7 +551,7 @@ public void ProcessParameterSets(UnivariateDistributionBase[] sampledDistributio
             Parallel.For(0, B, idx =>
             {
-                if (sampledDistributions[idx] != null)
+                if (sampledDistributions[idx] != null!)
                 {
                     ParameterSets[idx] = new ParameterSet(sampledDistributions[idx].GetParameters, double.NaN);
                 }
diff --git a/Numerics/Distributions/Univariate/Uniform.cs b/Numerics/Distributions/Univariate/Uniform.cs
index cea983c7..9497e43f 100644
--- a/Numerics/Distributions/Univariate/Uniform.cs
+++ b/Numerics/Distributions/Univariate/Uniform.cs
@@ -251,7 +251,7 @@ public ArgumentOutOfRangeException ValidateParameters(double min, double max, bo
                     throw new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
                 return new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/UniformDiscrete.cs b/Numerics/Distributions/Univariate/UniformDiscrete.cs
index f5f6500d..ab603bec 100644
--- a/Numerics/Distributions/Univariate/UniformDiscrete.cs
+++ b/Numerics/Distributions/Univariate/UniformDiscrete.cs
@@ -259,7 +259,7 @@ public ArgumentOutOfRangeException ValidateParameters(double min, double max, bo
                     throw new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
                 return new ArgumentOutOfRangeException(nameof(Min), "The min cannot be greater than the max.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Distributions/Univariate/Weibull.cs b/Numerics/Distributions/Univariate/Weibull.cs
index f3683016..1f8a5bc4 100644
--- a/Numerics/Distributions/Univariate/Weibull.cs
+++ b/Numerics/Distributions/Univariate/Weibull.cs
@@ -304,7 +304,7 @@ public ArgumentOutOfRangeException ValidateParameters(double scale, double shape
                     throw new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be positive.");
                 return new ArgumentOutOfRangeException(nameof(Kappa), "The shape parameter κ (kappa) must be positive.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Functions/LinearFunction.cs b/Numerics/Functions/LinearFunction.cs
index 2902b269..9d4dee0f 100644
--- a/Numerics/Functions/LinearFunction.cs
+++ b/Numerics/Functions/LinearFunction.cs
@@ -173,7 +173,7 @@ public ArgumentOutOfRangeException ValidateParameters(IList<double> parameters,
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Standard error must be greater than zero.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Standard error must be greater than zero.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Functions/PowerFunction.cs b/Numerics/Functions/PowerFunction.cs
index 08250343..900ac1e7 100644
--- a/Numerics/Functions/PowerFunction.cs
+++ b/Numerics/Functions/PowerFunction.cs
@@ -200,7 +200,7 @@ public ArgumentOutOfRangeException ValidateParameters(IList<double> parameters,
                     throw new ArgumentOutOfRangeException(nameof(Sigma), "Standard error must be greater than zero.");
                 return new ArgumentOutOfRangeException(nameof(Sigma), "Standard error must be greater than zero.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
diff --git a/Numerics/Functions/TabularFunction.cs b/Numerics/Functions/TabularFunction.cs
index ab741110..8e374875 100644
--- a/Numerics/Functions/TabularFunction.cs
+++ b/Numerics/Functions/TabularFunction.cs
@@ -159,7 +159,7 @@ public ArgumentOutOfRangeException ValidateParameters(IList<double> parameters,
                     throw new ArgumentOutOfRangeException(nameof(PairedData), "The uncertain ordered paired data has errors.");
                 return new ArgumentOutOfRangeException(nameof(PairedData), "The uncertain ordered paired data has errors.");
             }
-            return null;
+            return null!;
         }
 
         /// <summary>
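
The Predict and kNN methods below keep the library's convention of returning null! on a shape mismatch. For comparison, a throwing guard clause, which several of these sites could adopt instead, is sketched here with illustrative shapes only:

    using System;

    // Sketch contrasting "return null!" with a throwing guard clause;
    // the shape check is illustrative, not the library's code.
    static class GuardSketch
    {
        static double[] Predict(double[,] x, int expectedColumns)
        {
            if (x.GetLength(1) != expectedColumns)
                throw new ArgumentException(
                    $"Expected {expectedColumns} columns but got {x.GetLength(1)}.", nameof(x));
            return new double[x.GetLength(0)]; // placeholder result
        }
    }
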
b/Numerics/Machine Learning/Supervised/DecisionTree.cs index e3a9d35f..bab82c7d 100644 --- a/Numerics/Machine Learning/Supervised/DecisionTree.cs +++ b/Numerics/Machine Learning/Supervised/DecisionTree.cs @@ -456,7 +456,7 @@ public double[] Predict(double[,] X) /// The matrix of predictors. public double[] Predict(Matrix X) { - if (!IsTrained || X.NumberOfColumns != Dimensions) return null; + if (!IsTrained || X.NumberOfColumns != Dimensions) return null!; var result = new double[X.NumberOfRows]; for (int i = 0; i < X.NumberOfRows; i++) { diff --git a/Numerics/Machine Learning/Supervised/GeneralizedLinearModel.cs b/Numerics/Machine Learning/Supervised/GeneralizedLinearModel.cs index 19aed4db..e36ff85c 100644 --- a/Numerics/Machine Learning/Supervised/GeneralizedLinearModel.cs +++ b/Numerics/Machine Learning/Supervised/GeneralizedLinearModel.cs @@ -118,7 +118,7 @@ public GeneralizedLinearModel(Matrix x, Vector y, bool hasIntercept = true, Link /// /// The list of estimated parameter values. /// - public double[] Parameters { get; private set; } + public double[] Parameters { get; private set; } = Array.Empty(); /// /// The list of the estimated parameter names. @@ -128,27 +128,27 @@ public GeneralizedLinearModel(Matrix x, Vector y, bool hasIntercept = true, Link /// /// The list of the estimated parameter standard errors. /// - public double[] ParameterStandardErrors { get; private set; } + public double[] ParameterStandardErrors { get; private set; } = Array.Empty(); /// /// The list of the estimated parameter z-scores. /// - public double[] ParameterZScores { get; private set; } + public double[] ParameterZScores { get; private set; } = Array.Empty(); /// /// The list of the estimated parameter p-values. /// - public double[] ParameterPValues { get; private set; } + public double[] ParameterPValues { get; private set; } = Array.Empty(); /// /// The estimated parameter covariance matrix. /// - public Matrix Covariance { get; private set; } + public Matrix Covariance { get; private set; } = new Matrix(1, 1); /// /// The residuals of the fitted linear model. /// - public double[] Residuals { get; private set; } + public double[] Residuals { get; private set; } = Array.Empty(); /// /// The model standard error. @@ -188,7 +188,7 @@ public GeneralizedLinearModel(Matrix x, Vector y, bool hasIntercept = true, Link /// /// Gets the optimizer used to train the model. Default = Nelder-Mead. /// - public Optimizer Optimizer { get; private set; } + public Optimizer Optimizer { get; private set; } = null!; /// /// Gets the link function type.
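The GeneralizedLinearModel hunk above mixes two ways of satisfying nullable reference types, and they are not equivalent. A minimal sketch of the difference, using a hypothetical FitSketch class rather than anything in Numerics:

#nullable enable
using System;

public class FitSketch
{
    // Empty-but-valid default: callers can safely enumerate the array
    // even before the model has been trained.
    public double[] Parameters { get; private set; } = Array.Empty<double>();

    // Null-forgiving default: the compiler warning disappears, but reading
    // Residuals before Train() assigns it still throws NullReferenceException.
    public double[] Residuals { get; private set; } = null!;

    public void Train()
    {
        // Hypothetical assignments standing in for the real fitting logic.
        Parameters = new[] { 1.0, 2.0 };
        Residuals = new[] { 0.1, -0.1 };
    }
}

Array.Empty<T>() returns a cached instance, so the safer default also costs nothing per object; = null! is the right tool only where a non-null value is guaranteed before first use.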
diff --git a/Numerics/Machine Learning/Supervised/KNearestNeighbors.cs b/Numerics/Machine Learning/Supervised/KNearestNeighbors.cs index 24b4e42f..f8c7bdfd 100644 --- a/Numerics/Machine Learning/Supervised/KNearestNeighbors.cs +++ b/Numerics/Machine Learning/Supervised/KNearestNeighbors.cs @@ -271,7 +271,7 @@ public double[] BootstrapPredict(Matrix X, int seed = -1) /// The test matrix of predictors private int[] kNN(Matrix xTrain, Vector yTrain, Matrix xTest) { - if (NumberOfFeatures != xTrain.NumberOfColumns) return null; + if (NumberOfFeatures != xTrain.NumberOfColumns) return null!; int R = xTest.NumberOfRows; var result = new int[K]; for (int i = 0; i < R; i++) @@ -306,7 +306,7 @@ private int[] kNN(Matrix xTrain, Vector yTrain, Matrix xTest) /// The test matrix of predictors private double[] kNNPredict(Matrix xTrain, Vector yTrain, Matrix xTest) { - if (xTest.NumberOfColumns != xTrain.NumberOfColumns) return null; + if (xTest.NumberOfColumns != xTrain.NumberOfColumns) return null!; int R = xTest.NumberOfRows; var result = new double[R]; for (int i = 0; i < R; i++) diff --git a/Numerics/Machine Learning/Supervised/NaiveBayes.cs b/Numerics/Machine Learning/Supervised/NaiveBayes.cs index 3a16ef9d..7a47721b 100644 --- a/Numerics/Machine Learning/Supervised/NaiveBayes.cs +++ b/Numerics/Machine Learning/Supervised/NaiveBayes.cs @@ -143,17 +143,17 @@ public NaiveBayes(Matrix x, Vector y) /// /// The means of each feature given each class. /// - public double[,] Means { get; private set; } + public double[,] Means { get; private set; } = null!; /// /// The standard deviations of each feature given each class. /// - public double[,] StandardDeviations { get; private set; } + public double[,] StandardDeviations { get; private set; } = null!; /// /// The prior probability for each class. /// - public double[] Priors { get; private set; } + public double[] Priors { get; private set; } = null!; /// /// Determines if the classifier has been trained. @@ -245,7 +245,7 @@ public double[] Predict(double[,] X) /// The matrix of predictors. public double[] Predict(Matrix X) { - if (!IsTrained || X.NumberOfColumns != this.X.NumberOfColumns) return null; + if (!IsTrained || X.NumberOfColumns != this.X.NumberOfColumns) return null!; var result = new double[X.NumberOfRows]; for (int i = 0; i < X.NumberOfRows; i++) { diff --git a/Numerics/Machine Learning/Supervised/RandomForest.cs b/Numerics/Machine Learning/Supervised/RandomForest.cs index 5d8e6b13..94da4efe 100644 --- a/Numerics/Machine Learning/Supervised/RandomForest.cs +++ b/Numerics/Machine Learning/Supervised/RandomForest.cs @@ -166,7 +166,7 @@ public RandomForest(Matrix x, Vector y, int seed = -1) /// /// The array of decision trees. /// - public DecisionTree[] DecisionTrees { get; private set; } + public DecisionTree[] DecisionTrees { get; private set; } = null!; /// /// Determines whether this is for regression or classification. Default = regression. @@ -250,7 +250,7 @@ private DecisionTree BootstrapDecisionTree(int seed = -1) /// The confidence level; Default = 0.1, which will result in the 90% confidence intervals. 
public double[,] Predict(Matrix X, double alpha = 0.1) { - if (!IsTrained) return null; + if (!IsTrained) return null!; var percentiles = new double[] { alpha / 2d, 0.5, 1d - alpha / 2d }; var output = new double[X.NumberOfRows, 4]; // lower, median, upper, mean diff --git a/Numerics/Machine Learning/Support/DecisionNode.cs b/Numerics/Machine Learning/Support/DecisionNode.cs index 292e3c43..4a7d0014 100644 --- a/Numerics/Machine Learning/Support/DecisionNode.cs +++ b/Numerics/Machine Learning/Support/DecisionNode.cs @@ -54,12 +54,12 @@ public class DecisionNode /// /// Nodes to the left of the threshold. /// - public DecisionNode Left { get; set; } = null; + public DecisionNode Left { get; set; } = null!; /// /// Nodes to the right of the threshold. /// - public DecisionNode Right { get; set; } = null; + public DecisionNode Right { get; set; } = null!; /// /// The leaf node value. diff --git a/Numerics/Machine Learning/Unsupervised/GaussianMixtureModel.cs b/Numerics/Machine Learning/Unsupervised/GaussianMixtureModel.cs index 3429cd32..25ccdeb1 100644 --- a/Numerics/Machine Learning/Unsupervised/GaussianMixtureModel.cs +++ b/Numerics/Machine Learning/Unsupervised/GaussianMixtureModel.cs @@ -152,12 +152,12 @@ public GaussianMixtureModel(Matrix X, int k) /// /// The mixing weights /// - public double[] Weights { get; private set; } + public double[] Weights { get; private set; } = null!; /// /// The likelihood of each data point (row) and for each cluster (column). /// - public double[,] LikelihoodMatrix { get; private set; } + public double[,] LikelihoodMatrix { get; private set; } = null!; /// /// The total log-likelihood of the fit. diff --git a/Numerics/Machine Learning/Unsupervised/JenksNaturalBreaks.cs b/Numerics/Machine Learning/Unsupervised/JenksNaturalBreaks.cs index 81ab280f..c2df6e79 100644 --- a/Numerics/Machine Learning/Unsupervised/JenksNaturalBreaks.cs +++ b/Numerics/Machine Learning/Unsupervised/JenksNaturalBreaks.cs @@ -128,12 +128,12 @@ public JenksNaturalBreaks(IList data, int numberOfClusters, bool isDataSo /// /// Gets the array of estimated clusters. /// - public JenksCluster[] Clusters { get; private set; } + public JenksCluster[] Clusters { get; private set; } = null!; /// /// The array of break points. /// - public double[] Breaks { get; private set; } + public double[] Breaks { get; private set; } = null!; /// /// The goodness of fit measure. The closer to 1, the better the fit. diff --git a/Numerics/Mathematics/Differentiation/NumericalDerivative.cs b/Numerics/Mathematics/Differentiation/NumericalDerivative.cs index 848cca9e..312c2fa6 100644 --- a/Numerics/Mathematics/Differentiation/NumericalDerivative.cs +++ b/Numerics/Mathematics/Differentiation/NumericalDerivative.cs @@ -427,8 +427,8 @@ public static double SecondDerivativeBackward(Func f, double poi public static double[,] Jacobian( Func g, double[] theta, - double[]? lowerBounds = null, - double[]? upperBounds = null, + double[] lowerBounds = null!, + double[] upperBounds = null!, double relStep = 1e-5, double absStep = 1e-7, int maxBacktrack = 5) @@ -619,8 +619,8 @@ public static double SecondDerivativeBackward(Func f, double poi public static double[] Gradient( Func f, double[] theta, - double[]? lowerBounds = null, - double[]? upperBounds = null, + double[] lowerBounds = null!, + double[] upperBounds = null!, double relStep = 1e-5, double absStep = 1e-7, int maxBacktrack = 5) @@ -798,8 +798,8 @@ public static double[] Gradient( public static double[,] Hessian( Func f, double[] theta, - double[]? 
lowerBounds = null, - double[]? upperBounds = null, + double[] lowerBounds = null!, + double[] upperBounds = null!, double relStep = 1e-4, double absStep = 1e-6, int maxBacktrack = 6) diff --git a/Numerics/Mathematics/Fourier Methods/Fourier.cs b/Numerics/Mathematics/Fourier Methods/Fourier.cs index 21d50ec3..922a56ce 100644 --- a/Numerics/Mathematics/Fourier Methods/Fourier.cs +++ b/Numerics/Mathematics/Fourier Methods/Fourier.cs @@ -283,7 +283,7 @@ public static double[] Correlation(double[] data1, double[] data2) if (lagMax < 0) lagMax = (int)Math.Floor(Math.Min(10d * Math.Log10(n), n - 1)); if (lagMax < 1 || n < 2) - return null; + return null!; // Pad the length to be the power of 2 to facilitate FFT speed. int newLength = Convert.ToInt32(Math.Pow(2d, Math.Ceiling(Math.Log(series.Count, 2d)))); // Normalize the data series diff --git a/Numerics/Mathematics/Integration/AdaptiveGuassKronrod.cs b/Numerics/Mathematics/Integration/AdaptiveGuassKronrod.cs index ec1fb7e5..f942fd0b 100644 --- a/Numerics/Mathematics/Integration/AdaptiveGuassKronrod.cs +++ b/Numerics/Mathematics/Integration/AdaptiveGuassKronrod.cs @@ -186,10 +186,10 @@ public override void Integrate() Status = IntegrationStatus.Success; } } - catch (Exception ex) + catch (Exception) { Status = IntegrationStatus.Failure; - if (ReportFailure) throw ex; + if (ReportFailure) throw; } } @@ -231,10 +231,10 @@ public void Integrate(List bins) Status = IntegrationStatus.Success; } } - catch (Exception ex) + catch (Exception) { Status = IntegrationStatus.Failure; - if (ReportFailure) throw ex; + if (ReportFailure) throw; } } diff --git a/Numerics/Mathematics/Integration/AdaptiveSimpsonsRule2D.cs b/Numerics/Mathematics/Integration/AdaptiveSimpsonsRule2D.cs index 81dbda07..5539218e 100644 --- a/Numerics/Mathematics/Integration/AdaptiveSimpsonsRule2D.cs +++ b/Numerics/Mathematics/Integration/AdaptiveSimpsonsRule2D.cs @@ -170,10 +170,10 @@ public override void Integrate() Status = IntegrationStatus.Success; } } - catch (Exception ex) + catch (Exception) { Status = IntegrationStatus.Failure; - if (ReportFailure) throw ex; + if (ReportFailure) throw; } } diff --git a/Numerics/Mathematics/Integration/Miser.cs b/Numerics/Mathematics/Integration/Miser.cs index 15dde370..215feddf 100644 --- a/Numerics/Mathematics/Integration/Miser.cs +++ b/Numerics/Mathematics/Integration/Miser.cs @@ -340,7 +340,7 @@ private void miser(Func function, double[] regn, int npts, dou private void ranpt(double[] pt, double[] regn) { int j, n = pt.Length; - double[] rnd = null; + double[] rnd = null!; if (UseSobolSequence) rnd = _sobol.NextDouble(); diff --git a/Numerics/Mathematics/Integration/Support/Integrator.cs b/Numerics/Mathematics/Integration/Support/Integrator.cs index f9aac968..e9baeceb 100644 --- a/Numerics/Mathematics/Integration/Support/Integrator.cs +++ b/Numerics/Mathematics/Integration/Support/Integrator.cs @@ -150,7 +150,7 @@ protected virtual void Validate() /// /// Optimization status. /// Inner exception. - protected virtual void UpdateStatus(IntegrationStatus status, Exception? exception = null) + protected virtual void UpdateStatus(IntegrationStatus status, Exception exception = null!) 
{ Status = status; if (status == IntegrationStatus.Failure) diff --git a/Numerics/Mathematics/Integration/Vegas.cs b/Numerics/Mathematics/Integration/Vegas.cs index fc3a93bb..39205954 100644 --- a/Numerics/Mathematics/Integration/Vegas.cs +++ b/Numerics/Mathematics/Integration/Vegas.cs @@ -112,7 +112,7 @@ public Vegas(Func function, int dimensions, IList function, int dimensions, IList diff --git a/Numerics/Mathematics/Linear Algebra/Support/Matrix.cs b/Numerics/Mathematics/Linear Algebra/Support/Matrix.cs index a2f5c333..3688d63a 100644 --- a/Numerics/Mathematics/Linear Algebra/Support/Matrix.cs +++ b/Numerics/Mathematics/Linear Algebra/Support/Matrix.cs @@ -186,8 +186,10 @@ public Matrix(XElement xElement) int nrow = 0, ncol = 0; - if (xElement.Attribute(nameof(NumberOfRows)) != null) int.TryParse(xElement.Attribute(nameof(NumberOfRows)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out nrow); - if (xElement.Attribute(nameof(NumberOfColumns)) != null) int.TryParse(xElement.Attribute(nameof(NumberOfColumns)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out ncol); + var rowsAttr = xElement.Attribute(nameof(NumberOfRows)); + var colsAttr = xElement.Attribute(nameof(NumberOfColumns)); + if (rowsAttr != null) int.TryParse(rowsAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out nrow); + if (colsAttr != null) int.TryParse(colsAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out ncol); _matrix = new double[nrow, ncol]; int rowCount = 0; @@ -219,7 +221,7 @@ public Matrix(XElement xElement) #region Members - private double[,] _matrix; + private double[,] _matrix = null!; /// /// Gets the number of rows. @@ -251,7 +253,7 @@ public int NumberOfColumns /// /// The matrix column header text. /// - public string[] Header { get; set; } + public string[] Header { get; set; } = null!; /// /// Evaluates whether this matrix is symmetric. diff --git a/Numerics/Mathematics/Linear Algebra/Support/Vector.cs b/Numerics/Mathematics/Linear Algebra/Support/Vector.cs index eb687253..b72e1416 100644 --- a/Numerics/Mathematics/Linear Algebra/Support/Vector.cs +++ b/Numerics/Mathematics/Linear Algebra/Support/Vector.cs @@ -108,10 +108,10 @@ public double this[int index] set { _vector[index] = value; } } - /// - /// The vector header text. - /// - public string Header { get; set; } + /// + /// The vector header text. + /// + public string Header { get; set; } = null!; /// diff --git a/Numerics/Mathematics/Optimization/Dynamic/Dijkstra.cs b/Numerics/Mathematics/Optimization/Dynamic/Dijkstra.cs index 1f0bcdf7..05197518 100644 --- a/Numerics/Mathematics/Optimization/Dynamic/Dijkstra.cs +++ b/Numerics/Mathematics/Optimization/Dynamic/Dijkstra.cs @@ -94,7 +94,7 @@ public static bool PathExists(float[,] resultTable, int nodeIndex) /// Optional number of nodes in the network. If not provided it will be calculated internally. /// Optional list of incoming edges from each node in the network. If not provided or mismatched with edges it will be calculated internally. /// Lookup table of shortest paths from any given node. - public static float[,] Solve(IList edges, int[] destinationIndices, int nodeCount = -1, List[] edgesFromNodes = null) + public static float[,] Solve(IList edges, int[] destinationIndices, int nodeCount = -1, List[] edgesFromNodes = null!) { // Set optional parameters if required. int nNodes = (nodeCount == -1) ? 
(edges.Max(o => Math.Max(o.FromIndex,o.ToIndex)) + 1) : nodeCount; @@ -147,7 +147,7 @@ public static bool PathExists(float[,] resultTable, int nodeIndex) /// Optional number of nodes in the network. If not provided it will be calculated internally. /// Optional list of incoming edges from each node in the network. If not provided or mismatched with edges it will be calculated internally. /// Lookup table of shortest paths from any given node. - public static float[,] Solve(IList edges, int destinationIndex, int nodeCount = -1, List[] edgesToNodes = null) + public static float[,] Solve(IList edges, int destinationIndex, int nodeCount = -1, List[] edgesToNodes = null!) { // Set optional parameters if required. int nNodes = (nodeCount == -1) ? (edges.Max(o => Math.Max(o.FromIndex, o.ToIndex)) + 1) : nodeCount; diff --git a/Numerics/Mathematics/Optimization/Dynamic/Network.cs b/Numerics/Mathematics/Optimization/Dynamic/Network.cs index ac5bdb50..6c4968b6 100644 --- a/Numerics/Mathematics/Optimization/Dynamic/Network.cs +++ b/Numerics/Mathematics/Optimization/Dynamic/Network.cs @@ -166,7 +166,7 @@ public List GetPath(int[] edgesToRemove, int startNodeIndex) } } // if n = 0, then no roads to escape to - if (heap.Count == 0) return null; + if (heap.Count == 0) return null!; float tempWeight; int tempIndex; @@ -345,7 +345,7 @@ public List GetPath(int[] edgesToRemove, int startNodeIndex) return UpdatedPath; } - else return null; + else return null!; } public List GetPath(int[] edgesToRemove, int startNodeIndex, float[,] existingResultsTable) @@ -407,7 +407,7 @@ public List GetPath(int[] edgesToRemove, int startNodeIndex, float[,] exist } //if n = 0 then no roads to escape to - if (heap.Count == 0) return null; + if (heap.Count == 0) return null!; float tempWeight; int tempIndex; diff --git a/Numerics/Mathematics/Optimization/Global/MLSL.cs b/Numerics/Mathematics/Optimization/Global/MLSL.cs index 4525f536..01c066bf 100644 --- a/Numerics/Mathematics/Optimization/Global/MLSL.cs +++ b/Numerics/Mathematics/Optimization/Global/MLSL.cs @@ -175,12 +175,12 @@ public MLSL(Func objectiveFunction, int numberOfParameters, IL /// /// The list of all sampled points. /// - public List SampledPoints { get; private set; } + public List SampledPoints { get; private set; } = null!; /// /// The list of all local optimums. /// - public List LocalMinimums { get; private set; } + public List LocalMinimums { get; private set; } = null!; /// /// The minimum number of iterations to carry out with no improvement. Default = 5. @@ -207,7 +207,7 @@ protected override void Optimize() double oldFit = double.MaxValue; int noImprovement = 0; bool cancel = false; - Optimizer solver = null; + Optimizer solver = null!; var prng = new MersenneTwister(PRNGSeed); // Set lower and upper bounds and @@ -378,7 +378,7 @@ protected override void Optimize() private Optimizer GetLocalOptimizer(IList initialValues, double relativeTolerance, double absoluteTolerance, ref bool cancel) { bool localCancel = false; - Optimizer solver = null; + Optimizer solver = null!; // Make sure the parameters are within the bounds. 
for (int i = 0; i < NumberOfParameters; i++) diff --git a/Numerics/Mathematics/Optimization/Global/MultiStart.cs b/Numerics/Mathematics/Optimization/Global/MultiStart.cs index 56b5d71b..05ac9006 100644 --- a/Numerics/Mathematics/Optimization/Global/MultiStart.cs +++ b/Numerics/Mathematics/Optimization/Global/MultiStart.cs @@ -163,7 +163,7 @@ protected override void Optimize() { int i, j, D = NumberOfParameters; bool cancel = false; - Optimizer solver = null; + Optimizer solver = null!; // Set lower and upper bounds and // create uniform distributions for each parameter @@ -219,7 +219,7 @@ protected override void Optimize() private Optimizer GetLocalOptimizer(IList initialValues, double relativeTolerance, double absoluteTolerance, ref bool cancel) { bool localCancel = false; - Optimizer solver = null; + Optimizer solver = null!; // Make sure the parameters are within the bounds. for (int i = 0; i < NumberOfParameters; i++) diff --git a/Numerics/Mathematics/Optimization/Global/ParticleSwarm.cs b/Numerics/Mathematics/Optimization/Global/ParticleSwarm.cs index 24d6c28a..566664f4 100644 --- a/Numerics/Mathematics/Optimization/Global/ParticleSwarm.cs +++ b/Numerics/Mathematics/Optimization/Global/ParticleSwarm.cs @@ -235,7 +235,7 @@ private class Particle /// The velocity determines how the particle moves through the search space. /// It is updated based on the particle's personal best and the global best. /// - public double[] Velocity { get; set; } + public double[] Velocity { get; set; } = null!; } } } diff --git a/Numerics/Mathematics/Optimization/Local/ADAM.cs b/Numerics/Mathematics/Optimization/Local/ADAM.cs index 6ba26947..0d8466ec 100644 --- a/Numerics/Mathematics/Optimization/Local/ADAM.cs +++ b/Numerics/Mathematics/Optimization/Local/ADAM.cs @@ -76,7 +76,7 @@ public class ADAM : Optimizer /// Optional. Function to evaluate the gradient. Default uses finite difference. public ADAM(Func objectiveFunction, int numberOfParameters, IList initialValues, IList lowerBounds, IList upperBounds, double alpha = 0.001, - Func gradient = null) : base(objectiveFunction, numberOfParameters) + Func gradient = null!) : base(objectiveFunction, numberOfParameters) { // Check if the length of the initial, lower and upper bounds equal the number of parameters if (initialValues.Count != numberOfParameters || lowerBounds.Count != numberOfParameters || upperBounds.Count != numberOfParameters) diff --git a/Numerics/Mathematics/Optimization/Local/BFGS.cs b/Numerics/Mathematics/Optimization/Local/BFGS.cs index 8f76256e..90f949a0 100644 --- a/Numerics/Mathematics/Optimization/Local/BFGS.cs +++ b/Numerics/Mathematics/Optimization/Local/BFGS.cs @@ -78,7 +78,7 @@ public class BFGS : Optimizer /// Optional. Function to evaluate the gradient. Default uses finite difference. public BFGS(Func objectiveFunction, int numberOfParameters, IList initialValues, IList lowerBounds, IList upperBounds, - Func gradient = null) : base(objectiveFunction, numberOfParameters) + Func gradient = null!) 
: base(objectiveFunction, numberOfParameters) { // Check if the length of the initial, lower and upper bounds equal the number of parameters if (initialValues.Count != numberOfParameters || lowerBounds.Count != numberOfParameters || upperBounds.Count != numberOfParameters) diff --git a/Numerics/Mathematics/Optimization/Local/GradientDescent.cs b/Numerics/Mathematics/Optimization/Local/GradientDescent.cs index fd4e64d5..d19320af 100644 --- a/Numerics/Mathematics/Optimization/Local/GradientDescent.cs +++ b/Numerics/Mathematics/Optimization/Local/GradientDescent.cs @@ -77,7 +77,7 @@ public class GradientDescent : Optimizer /// Optional. Function to evaluate the gradient. Default uses finite difference. public GradientDescent(Func objectiveFunction, int numberOfParameters, IList initialValues, IList lowerBounds, IList upperBounds, double alpha = 0.001, - Func gradient = null) : base(objectiveFunction, numberOfParameters) + Func gradient = null!) : base(objectiveFunction, numberOfParameters) { // Check if the length of the initial, lower and upper bounds equal the number of parameters if (initialValues.Count != numberOfParameters || lowerBounds.Count != numberOfParameters || upperBounds.Count != numberOfParameters) diff --git a/Numerics/Mathematics/Optimization/Support/Optimizer.cs b/Numerics/Mathematics/Optimization/Support/Optimizer.cs index fc22a0ea..f5605d56 100644 --- a/Numerics/Mathematics/Optimization/Support/Optimizer.cs +++ b/Numerics/Mathematics/Optimization/Support/Optimizer.cs @@ -60,7 +60,7 @@ protected Optimizer(Func objectiveFunction, int numberOfParame #region Inputs - private Func _objectiveFunction; + private Func _objectiveFunction = null!; /// /// The maximum number of optimization iterations allowed. Default = 10,000. @@ -142,7 +142,7 @@ public Func ObjectiveFunction /// /// A trace of the parameter set and fitness evaluated until convergence. /// - public List ParameterSetTrace { get; protected set; } + public List ParameterSetTrace { get; protected set; } = null!; /// /// Determines the optimization method status. @@ -152,7 +152,7 @@ public Func ObjectiveFunction /// /// The numerically differentiated Hessian matrix. This is only computed when the optimization is successful. /// - public Matrix Hessian { get; protected set; } + public Matrix Hessian { get; protected set; } = null!; #endregion @@ -166,7 +166,7 @@ public virtual void ClearResults() BestParameterSet = new ParameterSet(); ParameterSetTrace = new List(); Status = OptimizationStatus.None; - Hessian = null; + Hessian = null!; } /// @@ -287,7 +287,7 @@ protected virtual double RepairParameter(double value, double lowerBound, double /// /// Optimization status. /// Inner exception. - protected virtual void UpdateStatus(OptimizationStatus status, Exception exception = null) + protected virtual void UpdateStatus(OptimizationStatus status, Exception exception = null!) 
{ Status = status; if (status == OptimizationStatus.MaximumIterationsReached) diff --git a/Numerics/Mathematics/Optimization/Support/ParameterSet.cs b/Numerics/Mathematics/Optimization/Support/ParameterSet.cs index e154df41..86a61141 100644 --- a/Numerics/Mathematics/Optimization/Support/ParameterSet.cs +++ b/Numerics/Mathematics/Optimization/Support/ParameterSet.cs @@ -94,14 +94,17 @@ public ParameterSet(XElement xElement) Values[i] = outVal; } } - if (xElement.Attribute(nameof(Fitness)) != null) + + var fitAttr = xElement.Attribute(nameof(Fitness)); + if (fitAttr != null) { - double.TryParse(xElement.Attribute(nameof(Fitness)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var fitness); + double.TryParse(fitAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var fitness); Fitness = fitness; } - if (xElement.Attribute(nameof(Weight)) != null) + var weightAttr = xElement.Attribute(nameof(Weight)); + if (weightAttr != null) { - double.TryParse(xElement.Attribute(nameof(Weight)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var weight); + double.TryParse(weightAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var weight); Weight = weight; } } @@ -109,7 +112,7 @@ public ParameterSet(XElement xElement) /// /// The trial parameter set values. /// - public double[] Values; + public double[] Values = null!; /// /// The objective function result (or fitness) given the trial parameter set. diff --git a/Numerics/Numerics.csproj b/Numerics/Numerics.csproj index 4c2823b6..380eca32 100644 --- a/Numerics/Numerics.csproj +++ b/Numerics/Numerics.csproj @@ -41,7 +41,7 @@ - + all runtime; build; native; contentfiles; analyzers @@ -64,11 +64,11 @@ - - - - - + + + + + diff --git a/Numerics/Sampling/Bootstrap/Bootstrap.cs b/Numerics/Sampling/Bootstrap/Bootstrap.cs index b8d694ca..c64db1c7 100644 --- a/Numerics/Sampling/Bootstrap/Bootstrap.cs +++ b/Numerics/Sampling/Bootstrap/Bootstrap.cs @@ -44,17 +44,17 @@ public class Bootstrap /// /// Delegate function for resampling the original data and model fit. /// - public Func ResampleFunction { get; set; } + public Func ResampleFunction { get; set; } = null!; /// /// Delegate function for fitting a model. /// - public Func FitFunction { get; set; } + public Func FitFunction { get; set; } = null!; /// /// Delegate function for extracting a statistic from the fit result. /// - public Func StatisticFunction { get; set; } + public Func StatisticFunction { get; set; } = null!; /// /// Number of bootstrap replicates. @@ -79,8 +79,8 @@ public Bootstrap(TData originalData, ParameterSet originalParameters) private TData _originalData; private ParameterSet _originalParameters; - private ParameterSet[] _bootstrapParameterSets; - private double[][] _bootstrapStatistics; + private ParameterSet[] _bootstrapParameterSets = null!; + private double[][] _bootstrapStatistics = null!; /// /// Gets the bootstrapped model parameter sets. diff --git a/Numerics/Sampling/MCMC/ARWMH.cs b/Numerics/Sampling/MCMC/ARWMH.cs index 845b4688..59bd92ab 100644 --- a/Numerics/Sampling/MCMC/ARWMH.cs +++ b/Numerics/Sampling/MCMC/ARWMH.cs @@ -76,8 +76,8 @@ public ARWMH(List priorDistributions, LogLikelihood log private Matrix sigmaIdentity; - private RunningCovarianceMatrix[] sigma; - private MultivariateNormal[] mvn; + private RunningCovarianceMatrix[] sigma = null!; + private MultivariateNormal[] mvn = null!; /// /// The scaling parameter used to scale the adaptive covariance matrix. 
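A recurring signature change in this stretch — UpdateStatus, the Dijkstra.Solve overloads, the ADAM/BFGS/GradientDescent constructors — swaps a nullable optional parameter (Type? p = null) for a non-nullable one defaulted to null!. A hedged sketch of what that trade means, with a hypothetical method rather than one from this PR:

#nullable enable
using System;

public static class DefaultSketch
{
    // Non-nullable parameter with a null-forgiving default: callers that
    // omit 'gradient' compile without warnings, yet the value is still null.
    public static double Step(Func<double, double> f, Func<double, double> gradient = null!)
    {
        if (gradient is null)
        {
            // The guard cannot be dropped; it fires whenever the default is used.
            // Hypothetical fallback: a forward finite difference at zero.
            const double h = 1e-7;
            return (f(h) - f(0.0)) / h;
        }
        return gradient(0.0);
    }
}

Keeping the Func<double, double>? gradient = null spelling would preserve the compiler check at call sites; = null! simply silences it, which appears to be the deliberate trade made throughout this diff.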
diff --git a/Numerics/Sampling/MCMC/Base/MCMCSampler.cs b/Numerics/Sampling/MCMC/Base/MCMCSampler.cs index 5135ed16..0c92873a 100644 --- a/Numerics/Sampling/MCMC/Base/MCMCSampler.cs +++ b/Numerics/Sampling/MCMC/Base/MCMCSampler.cs @@ -176,17 +176,17 @@ public int ThinningInterval /// /// The master pseudo random number generator (PRNG). /// - protected Random _masterPRNG; + protected Random _masterPRNG = null!; /// /// The PRNG for each Markov Chain. /// - protected Random[] _chainPRNGs; + protected Random[] _chainPRNGs = null!; /// /// The current states of each chain. /// - protected ParameterSet[] _chainStates; + protected ParameterSet[] _chainStates = null!; /// /// The Log-Likelihood function to evaluate. @@ -251,12 +251,12 @@ public enum InitializationType /// /// The Multivariate Normal proposal distribution set from the MAP estimate. /// - protected MultivariateNormal _MVN; + protected MultivariateNormal _MVN = null!; /// /// Event is raised when the simulation progress changes. /// - public event ProgressChangedEventHandler ProgressChanged; + public event ProgressChangedEventHandler ProgressChanged = null!; /// /// Event is raised when the simulation progress changes. @@ -273,7 +273,7 @@ public enum InitializationType /// /// Cancellation token source. /// - public CancellationTokenSource CancellationTokenSource { get; set; } + public CancellationTokenSource CancellationTokenSource { get; set; } = null!; #endregion @@ -282,22 +282,22 @@ public enum InitializationType /// /// Gets the population matrix used for population-based samplers. /// - public List PopulationMatrix { get; protected set; } + public List PopulationMatrix { get; protected set; } = null!; /// /// Gets the list of sampled Markov Chains. /// - public List[] MarkovChains { get; protected set; } + public List[] MarkovChains { get; protected set; } = null!; /// /// Keeps track of the number of accepted samples per chain. /// - public int[] AcceptCount { get; protected set; } + public int[] AcceptCount { get; protected set; } = null!; /// /// Keeps track of the number of calls to the proposal sampler per chain. /// - public int[] SampleCount { get; protected set; } + public int[] SampleCount { get; protected set; } = null!; /// /// The acceptance rate per chain. @@ -316,7 +316,7 @@ public double[] AcceptanceRates /// /// The average log-likelihood across each chain for each iteration. /// - public List MeanLogLikelihood { get; protected set; } + public List MeanLogLikelihood { get; protected set; } = null!; /// /// Gets and sets the number of posterior parameter sets to output. @@ -326,7 +326,7 @@ public double[] AcceptanceRates /// /// Output posterior parameter sets. These are recorded after the iterations have been completed. /// - public List[] Output { get; protected set; } + public List[] Output { get; protected set; } = null!; /// /// The output parameter set that produced the maximum likelihood. diff --git a/Numerics/Sampling/MCMC/HMC.cs b/Numerics/Sampling/MCMC/HMC.cs index ad9a125a..36ee7181 100644 --- a/Numerics/Sampling/MCMC/HMC.cs +++ b/Numerics/Sampling/MCMC/HMC.cs @@ -80,7 +80,7 @@ public class HMC : MCMCSampler /// Optional. The leapfrog step size. Default = 0.1. /// Optional. The number of leapfrog steps. Default = 50. /// Optional. The function for evaluating the gradient of the log-likelihood. Numerical finite difference will be used by default. - public HMC(List priorDistributions, LogLikelihood logLikelihoodFunction, Vector? mass = null, double stepSize = 0.1, int steps = 50, Gradient? 
gradientFunction = null) : base(priorDistributions, logLikelihoodFunction) + public HMC(List priorDistributions, LogLikelihood logLikelihoodFunction, Vector mass = null!, double stepSize = 0.1, int steps = 50, Gradient gradientFunction = null!) : base(priorDistributions, logLikelihoodFunction) { InitialIterations = 100 * NumberOfParameters; diff --git a/Numerics/Sampling/MCMC/RWMH.cs b/Numerics/Sampling/MCMC/RWMH.cs index a516ced8..d3801c8c 100644 --- a/Numerics/Sampling/MCMC/RWMH.cs +++ b/Numerics/Sampling/MCMC/RWMH.cs @@ -69,7 +69,7 @@ public RWMH(List priorDistributions, LogLikelihood logL ProposalSigma = proposalSigma; } - private MultivariateNormal[] mvn; + private MultivariateNormal[] mvn = null!; /// /// The covariance matrix Σ (sigma) for the proposal distribution. diff --git a/Numerics/Sampling/MCMC/SNIS.cs b/Numerics/Sampling/MCMC/SNIS.cs index 16acd5b7..5a0561a5 100644 --- a/Numerics/Sampling/MCMC/SNIS.cs +++ b/Numerics/Sampling/MCMC/SNIS.cs @@ -59,7 +59,7 @@ public class SNIS : MCMCSampler /// The list of prior distributions for the model parameters. /// The Log-Likelihood function to evaluate. /// Optional. The multivariate Normal distribution is used for importance sampling. If null, naive Monte Carlo is performed. - public SNIS(List priorDistributions, LogLikelihood logLikelihoodFunction, MultivariateNormal? multivariateNormal = null) : base(priorDistributions, logLikelihoodFunction) + public SNIS(List priorDistributions, LogLikelihood logLikelihoodFunction, MultivariateNormal multivariateNormal = null!) : base(priorDistributions, logLikelihoodFunction) { mvn = multivariateNormal; useImportanceSampling = multivariateNormal != null ? true : false; @@ -75,7 +75,7 @@ public SNIS(List priorDistributions, LogLikelihood logL } private bool useImportanceSampling = false; - private MultivariateNormal? mvn = null; + private MultivariateNormal mvn = null!; /// protected override void InitializeCustomSettings() diff --git a/Numerics/Sampling/MCMC/Support/MCMCResults.cs b/Numerics/Sampling/MCMC/Support/MCMCResults.cs index 9aae090f..7ba6bb2a 100644 --- a/Numerics/Sampling/MCMC/Support/MCMCResults.cs +++ b/Numerics/Sampling/MCMC/Support/MCMCResults.cs @@ -94,31 +94,31 @@ public MCMCResults(ParameterSet map, IList parameterSets, double a /// The list of sampled Markov Chains. /// [JsonInclude] - public List[] MarkovChains { get; private set; } + public List[] MarkovChains { get; private set; } = null!; /// /// Output posterior parameter sets. /// [JsonInclude] - public List Output { get; private set; } + public List Output { get; private set; } = null!; /// /// The average log-likelihood across each chain for each iteration. /// [JsonInclude] - public List MeanLogLikelihood { get; private set; } + public List MeanLogLikelihood { get; private set; } = null!; /// /// The acceptance rate for each chain. /// [JsonInclude] - public double[] AcceptanceRates { get; private set; } + public double[] AcceptanceRates { get; private set; } = null!; /// /// Parameter results using the output posterior parameter sets. /// [JsonInclude] - public ParameterResults[] ParameterResults { get; private set; } + public ParameterResults[] ParameterResults { get; private set; } = null!; /// /// The output parameter set that produced the maximum likelihood. @@ -210,7 +210,8 @@ public static MCMCResults FromByteArray(byte[] bytes) }; try { - return JsonSerializer.Deserialize(bytes, options); + return JsonSerializer.Deserialize(bytes, options) + ?? 
throw new JsonException("Deserialized MCMCResults was null"); } catch { diff --git a/Numerics/Sampling/MCMC/Support/ParameterResults.cs b/Numerics/Sampling/MCMC/Support/ParameterResults.cs index c96c7fa8..7ac720a0 100644 --- a/Numerics/Sampling/MCMC/Support/ParameterResults.cs +++ b/Numerics/Sampling/MCMC/Support/ParameterResults.cs @@ -102,7 +102,7 @@ public ParameterResults(double[] values, double alpha = 0.1, bool sorted = false /// /// The autocorrelation function for each parameter. This is averaged across each chain. /// - public double[,] Autocorrelation { get; set; } + public double[,] Autocorrelation { get; set; } = null!; } } diff --git a/Numerics/Sampling/SobolSequence.cs b/Numerics/Sampling/SobolSequence.cs index c8ea8c39..4f6b753e 100644 --- a/Numerics/Sampling/SobolSequence.cs +++ b/Numerics/Sampling/SobolSequence.cs @@ -136,8 +136,8 @@ private void initialize() reader.ReadLine(); int index = 1; - string line = null; - while ((line = reader.ReadLine()) != null) + string? line; + while ((line = reader.ReadLine()) is not null) { var st = line.Split(' '); diff --git a/Numerics/Sampling/StratificationBin.cs b/Numerics/Sampling/StratificationBin.cs index 0cfc3c64..db403ae7 100644 --- a/Numerics/Sampling/StratificationBin.cs +++ b/Numerics/Sampling/StratificationBin.cs @@ -83,21 +83,24 @@ public StratificationBin(double lowerBound, double upperBound, double weight = - public StratificationBin(XElement element) { // Get required data - if (element.Attribute(nameof(LowerBound)) != null) + var lowerBoundAttr = element.Attribute(nameof(LowerBound)); + if (lowerBoundAttr != null) { - double.TryParse(element.Attribute(nameof(LowerBound)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var lower); + double.TryParse(lowerBoundAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var lower); LowerBound = lower; } - if (element.Attribute(nameof(UpperBound)) != null) + var upperBoundAttr = element.Attribute(nameof(UpperBound)); + if (upperBoundAttr != null) { - double.TryParse(element.Attribute(nameof(UpperBound)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var upper); + double.TryParse(upperBoundAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var upper); UpperBound = upper; } - if (element.Attribute(nameof(Weight)) != null) + var weightAttr = element.Attribute(nameof(Weight)); + if (weightAttr != null) { - double.TryParse(element.Attribute(nameof(Weight)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var weight); + double.TryParse(weightAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var weight); Weight = weight; } } @@ -142,8 +145,11 @@ public bool Contains(double x) /// +1 if this bin is lower than the compared bin. /// -1 otherwise. /// - public int CompareTo(StratificationBin other) + public int CompareTo(StratificationBin? other) { + if (other == null) + throw new ArgumentNullException(nameof(other), "The stratification bin to compare to cannot be null."); + if (UpperBound > other.LowerBound && LowerBound < other.UpperBound) throw new ArgumentException("The bins cannot be overlapping.", nameof(other)); @@ -164,7 +170,7 @@ public object Clone() /// /// Checks whether two stratification bins are equal. /// - public override bool Equals(object obj) + public override bool Equals(object? 
obj) { if (!(obj is StratificationBin)) return false; diff --git a/Numerics/Sampling/StratificationOptions.cs b/Numerics/Sampling/StratificationOptions.cs index 94c1a45b..daf1032d 100644 --- a/Numerics/Sampling/StratificationOptions.cs +++ b/Numerics/Sampling/StratificationOptions.cs @@ -98,27 +98,31 @@ public StratificationOptions(double lowerBound, double upperBound, int numberOfB public StratificationOptions(XElement element) { // Get required data - if (element.Attribute(nameof(LowerBound)) != null) + var lowerBoundAttr = element.Attribute(nameof(LowerBound)); + if (lowerBoundAttr != null) { - double.TryParse(element.Attribute(nameof(LowerBound)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var lower); + double.TryParse(lowerBoundAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var lower); LowerBound = lower; } - if (element.Attribute(nameof(UpperBound)) != null) + var upperBoundAttr = element.Attribute(nameof(UpperBound)); + if (upperBoundAttr != null) { - double.TryParse(element.Attribute(nameof(UpperBound)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var upper); + double.TryParse(upperBoundAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var upper); UpperBound = upper; } - if (element.Attribute(nameof(NumberOfBins)) != null) + var numberOfBinsAttr = element.Attribute(nameof(NumberOfBins)); + if (numberOfBinsAttr != null) { - int.TryParse(element.Attribute(nameof(NumberOfBins)).Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var nBins); + int.TryParse(numberOfBinsAttr.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out var nBins); NumberOfBins = nBins; } - if (element.Attribute(nameof(IsProbability)) != null) + var isProbabilityAttr = element.Attribute(nameof(IsProbability)); + if (isProbabilityAttr != null) { - bool.TryParse(element.Attribute(nameof(IsProbability)).Value, out var isProbability); + bool.TryParse(isProbabilityAttr.Value, out var isProbability); IsProbability = isProbability; } @@ -228,7 +232,7 @@ private void Validate() /// /// Compares objects for equality. /// - public override bool Equals(object obj) + public override bool Equals(object? obj) { if (obj is not StratificationOptions other) return false; diff --git a/Numerics/Sampling/Stratify.cs b/Numerics/Sampling/Stratify.cs index 676b540a..2519d910 100644 --- a/Numerics/Sampling/Stratify.cs +++ b/Numerics/Sampling/Stratify.cs @@ -302,7 +302,7 @@ public static List Probabilities(StratificationOptions option /// The number of dimensions to stratify. /// Seed for random number generator. /// The correlation matrix. If null, independence is assumed. - public static List> MultivariateProbabilities(StratificationOptions options, ImportanceDistribution distributionType = ImportanceDistribution.Uniform, bool isExhaustive = true, int dimension = 1, int seed = -1, double[,] correlation = null) + public static List> MultivariateProbabilities(StratificationOptions options, ImportanceDistribution distributionType = ImportanceDistribution.Uniform, bool isExhaustive = true, int dimension = 1, int seed = -1, double[,] correlation = null!) 
{ // Validate inputs var output = new List>(); diff --git a/Numerics/Utilities/ExtensionMethods.cs b/Numerics/Utilities/ExtensionMethods.cs index c83f06bc..e9f66ad9 100644 --- a/Numerics/Utilities/ExtensionMethods.cs +++ b/Numerics/Utilities/ExtensionMethods.cs @@ -63,7 +63,7 @@ public static T GetAttributeOfType(this Enum enumValue) where T : Attribute var type = enumValue.GetType(); var memInfo = type.GetMember(enumValue.ToString()); var attributes = memInfo[0].GetCustomAttributes(typeof(T), false); - return (attributes.Length > 0) ? (T)attributes[0] : null; + return (attributes.Length > 0) ? (T)attributes[0] : null!; } #endregion diff --git a/Numerics/Utilities/JsonConverters.cs b/Numerics/Utilities/JsonConverters.cs index 79877aaa..cc079278 100644 --- a/Numerics/Utilities/JsonConverters.cs +++ b/Numerics/Utilities/JsonConverters.cs @@ -78,14 +78,14 @@ public class Double2DArrayConverter : JsonConverter public override double[,] Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { if (reader.TokenType == JsonTokenType.Null) - return null; + return null!; if (reader.TokenType != JsonTokenType.StartObject) throw new JsonException("Expected StartObject token"); int rows = 0; int cols = 0; - double[] data = null; + double[]? data = null!; while (reader.Read()) { @@ -94,7 +94,7 @@ public class Double2DArrayConverter : JsonConverter if (reader.TokenType == JsonTokenType.PropertyName) { - string propertyName = reader.GetString(); + string? propertyName = reader.GetString(); reader.Read(); switch (propertyName) @@ -217,14 +217,14 @@ public class String2DArrayConverter : JsonConverter public override string[,] Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { if (reader.TokenType == JsonTokenType.Null) - return null; + return null!; if (reader.TokenType != JsonTokenType.StartObject) throw new JsonException("Expected StartObject token"); int rows = 0; int cols = 0; - string[] data = null; + string[]? data = null!; while (reader.Read()) { @@ -233,7 +233,7 @@ public class String2DArrayConverter : JsonConverter if (reader.TokenType == JsonTokenType.PropertyName) { - string propertyName = reader.GetString(); + string? propertyName = reader.GetString(); reader.Read(); switch (propertyName) @@ -371,13 +371,13 @@ public class UnivariateDistributionConverter : JsonConverter 0) + if (distribution != null! && parameters != null && parameters.Length > 0) { distribution.SetParameters(parameters); } - return distribution; + return distribution!; } catch { // If we can't recreate it, return null - return null; + return null!; } } @@ -443,7 +443,7 @@ public override UnivariateDistributionBase Read(ref Utf8JsonReader reader, Type /// public override void Write(Utf8JsonWriter writer, UnivariateDistributionBase value, JsonSerializerOptions options) { - if (value == null) + if (value == null!) 
{ writer.WriteNullValue(); return; diff --git a/Numerics/Utilities/SafeProgressReporter.cs b/Numerics/Utilities/SafeProgressReporter.cs index 2e1c4973..b2817074 100644 --- a/Numerics/Utilities/SafeProgressReporter.cs +++ b/Numerics/Utilities/SafeProgressReporter.cs @@ -79,12 +79,12 @@ public SafeProgressReporter(string taskName) private double _previousProgress = -0.0000000000001d; private string _previousMessage = ""; private MessageType _previousMessageType = MessageType.Status; - private Process _externalProcess; + private Process _externalProcess = null!; private List _subProgReporterCollection = new List(); private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource(); protected readonly SendOrPostCallback _invokeProgressHandlers; protected readonly SendOrPostCallback _invokeMessageHandlers; - protected SynchronizationContext _synchronizationContext; + protected SynchronizationContext? _synchronizationContext; /// /// Returns the most recent progress. @@ -104,7 +104,7 @@ public SafeProgressReporter(string taskName) /// /// Returns the task name. /// - public string TaskName { get; private set; } + public string TaskName { get; private set; } = null!; /// /// Returns the most recent message type. @@ -132,7 +132,7 @@ public ReadOnlyCollection ChildReporters /// /// Event is raised when the progress is reported. /// - public event ProgressReportedEventHandler ProgressReported; + public event ProgressReportedEventHandler ProgressReported = null!; /// /// Delegate for handling progress reported events. @@ -145,7 +145,7 @@ public ReadOnlyCollection ChildReporters /// /// Event is raised when a message is reported. /// - public event MessageReportedEventHandler MessageReported; + public event MessageReportedEventHandler MessageReported = null!; /// /// Delegate for handling message reported events. @@ -156,7 +156,7 @@ public ReadOnlyCollection ChildReporters /// /// Event is raised when the task starts. /// - public event TaskStartedEventHandler TaskStarted; + public event TaskStartedEventHandler TaskStarted = null!; /// /// Delegate for handling task started events. @@ -166,7 +166,7 @@ public ReadOnlyCollection ChildReporters /// /// Event is raised when the task ended. /// - public event TaskEndedEventHandler TaskEnded; + public event TaskEndedEventHandler TaskEnded = null!; /// /// Delegate for handling task ended events. @@ -176,7 +176,7 @@ public ReadOnlyCollection ChildReporters /// /// Event is raised when a child reporter is created. /// - public event ChildReporterCreatedEventHandler ChildReporterCreated; + public event ChildReporterCreatedEventHandler ChildReporterCreated = null!; /// /// Delegate for handling child reporter created events. @@ -287,9 +287,9 @@ public void ReportError(string message) /// Invokes the progress handlers. /// /// The object. - private void InvokeProgressHandlers(object state) + private void InvokeProgressHandlers(object? state) { - double prog = ((double[])state)[0]; + double prog = ((double[])state!)[0]; double prevProg = ((double[])state)[1]; if (prevProg < 0d) prevProg = 0d; @@ -301,9 +301,9 @@ private void InvokeProgressHandlers(object state) /// Invokes the message handlers. /// /// The object. - private void InvokeMessageHandlers(object state) + private void InvokeMessageHandlers(object? 
state) { - MessageContentStruct prog = (MessageContentStruct)state; + MessageContentStruct prog = (MessageContentStruct)state!; OnMessageReported(prog); MessageReported?.Invoke(prog); } @@ -353,7 +353,7 @@ public SafeProgressReporter CreateProgressModifier(float fractionOfTotal, string if (string.IsNullOrEmpty(subTaskName)) subTaskName = TaskName; var child = new SafeProgressReporter(subTaskName); - child.SetContext(_synchronizationContext); + child.SetContext(_synchronizationContext!); child._previousProgress = 0d; child.ProgressReported += (reporter, prog, progDelta) => ReportProgress(_previousProgress + progDelta * fractionOfTotal); child.MessageReported += msg => ReportMessage(msg); diff --git a/Numerics/Utilities/Tools.cs b/Numerics/Utilities/Tools.cs index 9479887e..5cad3c99 100644 --- a/Numerics/Utilities/Tools.cs +++ b/Numerics/Utilities/Tools.cs @@ -783,7 +783,7 @@ public static double[] Sequence(double start, double end, double step = 1) /// An array of bytes. public static byte[] Compress(byte[] data) { - if (data is null) return null; + if (data is null) return null!; var output = new MemoryStream(); using (var dstream = new DeflateStream(output, CompressionLevel.Optimal)) { @@ -798,7 +798,7 @@ public static byte[] Compress(byte[] data) /// An array of bytes. public static byte[] Decompress(byte[] data) { - if (data is null) return null; + if (data is null) return null!; var input = new MemoryStream(data); var output = new MemoryStream(); using (var dstream = new DeflateStream(input, CompressionMode.Decompress)) diff --git a/Test_Numerics/Data/Interpolation/Test_Bilinear.cs b/Test_Numerics/Data/Interpolation/Test_Bilinear.cs index 0c710915..540ff239 100644 --- a/Test_Numerics/Data/Interpolation/Test_Bilinear.cs +++ b/Test_Numerics/Data/Interpolation/Test_Bilinear.cs @@ -93,7 +93,7 @@ public void Test_BiLinear() double x1 = 350d; double x2 = 75d; double y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 874.84d, 1E-6); + Assert.AreEqual(874.84d, y, 1E-6); } /// @@ -136,31 +136,31 @@ public void Test_Log() var LogLinLin = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.Logarithmic }; double y1 = LogLinLin.Interpolate(x1, x2); - Assert.AreEqual(y1, 874.909523653025d, 1E-6); + Assert.AreEqual(874.909523653025d, y1, 1E-6); var LinLogLin = new Bilinear(x1Array, x2Array, yArray) { X2Transform = Transform.Logarithmic }; double y2 = LinLogLin.Interpolate(x1, x2); - Assert.AreEqual(y2, 875.919023759159d, 1E-6); + Assert.AreEqual(875.919023759159d, y2, 1E-6); var LinLinLog = new Bilinear(x1Array, x2Array, yArray) { YTransform = Transform.Logarithmic }; double y3 = LinLinLog.Interpolate(x1, x2); - Assert.AreEqual(y3, 874.8164, 1E-4); + Assert.AreEqual(874.8164, y3, 1E-4); var LinLogLog = new Bilinear(x1Array, x2Array, yArray) { X2Transform = Transform.Logarithmic, YTransform = Transform.Logarithmic }; double y4 = LinLogLog.Interpolate(x1, x2); - Assert.AreEqual(y4, 875.896104342695d, 1E-6); + Assert.AreEqual(875.896104342695d, y4, 1E-6); var LogLogLin = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.Logarithmic, X2Transform = Transform.Logarithmic }; double y5 = LogLogLin.Interpolate(x1, x2); - Assert.AreEqual(y5, 875.9855, 1E-4); + Assert.AreEqual(875.9855, y5, 1E-4); var LogLinLog = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.Logarithmic, YTransform = Transform.Logarithmic }; double y6 = LogLinLog.Interpolate(x1, x2); - Assert.AreEqual(y6, 874.886, 1E-4); + Assert.AreEqual(874.886, y6, 1E-4); var LogLogLog = new 
Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.Logarithmic, X2Transform = Transform.Logarithmic, YTransform = Transform.Logarithmic }; double y7 = LogLogLog.Interpolate(x1, x2); - Assert.AreEqual(y7, 875.962713889793d, 1E-6); + Assert.AreEqual(875.962713889793d, y7, 1E-6); } @@ -204,15 +204,15 @@ public void Test_Z_X() var ZLinLin = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.NormalZ }; double y1 = ZLinLin.Interpolate(x1, x2); - Assert.AreEqual(y1, 890.8358, 1E-4); + Assert.AreEqual(890.8358, y1, 1E-4); var LinZLin = new Bilinear(x1Array, x2Array, yArray) { X2Transform = Transform.NormalZ }; double y2 = LinZLin.Interpolate(x1, x2); - Assert.AreEqual(y2, 890.7267, 1E-4); + Assert.AreEqual(890.7267, y2, 1E-4); var ZZLin = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.NormalZ, X2Transform = Transform.NormalZ }; double y3 = ZZLin.Interpolate(x1, x2); - Assert.AreEqual(y3, 890.6835, 1E-4); + Assert.AreEqual(890.6835, y3, 1E-4); } /// @@ -255,19 +255,19 @@ public void Test_Z_Y() var LinLinZ = new Bilinear(x1Array, x2Array, yArray) { YTransform = Transform.NormalZ }; double y1 = LinLinZ.Interpolate(x1, x2); - Assert.AreEqual(y1, 0.9596228, 1E-6); + Assert.AreEqual(0.9596228, y1, 1E-6); var LinZZ = new Bilinear(x1Array, x2Array, yArray) { X2Transform = Transform.NormalZ, YTransform = Transform.NormalZ }; double y2 = LinZZ.Interpolate(x1, x2); - Assert.AreEqual(y2, 0.95946, 1E-6); + Assert.AreEqual(0.95946, y2, 1E-6); var ZLinZ = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.NormalZ, YTransform = Transform.NormalZ }; double y3 = ZLinZ.Interpolate(x1, x2); - Assert.AreEqual(y3, 0.9595799, 1E-6); + Assert.AreEqual(0.9595799, y3, 1E-6); var ZZZ = new Bilinear(x1Array, x2Array, yArray) { X1Transform = Transform.NormalZ, X2Transform = Transform.NormalZ, YTransform = Transform.NormalZ }; double y4 = ZZZ.Interpolate(x1, x2); - Assert.AreEqual(y4, 0.9594168, 1E-6); + Assert.AreEqual(0.9594168, y4, 1E-6); } /// @@ -311,46 +311,46 @@ public void Test_BilinearEdgeCases() double x1 = 50; double x2 = 28; double y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 850.36, 1E-6); + Assert.AreEqual(850.36, y, 1E-6); // Top Right x1 = 50; x2 = 300; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 928.87, 1E-6); + Assert.AreEqual(928.87, y, 1E-6); // Bottom Left x1 = 600; x2 = 25; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 871.84, 1E-6); + Assert.AreEqual(871.84, y, 1E-6); // Bottom Right x1 = 600; x2 = 300; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 929.68, 1E-6); + Assert.AreEqual(929.68, y, 1E-6); // Ascending - x1 out // Top x1 = 50; x2 = 75; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 859.405, 1E-6); + Assert.AreEqual(859.405, y, 1E-6); // Bottom x1 = 600; x2 = 225; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 924.93, 1E-6); + Assert.AreEqual(924.93, y, 1E-6); // Ascending - x2 out // Top x1 = 125; x2 = 25; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 854.11750, 1E-6); + Assert.AreEqual(854.11750, y, 1E-6); // Bottom x1 = 450; x2 = 300; y = bilinear.Interpolate(x1, x2); - Assert.AreEqual(y, 929.65000, 1E-6); + Assert.AreEqual(929.65000, y, 1E-6); } } diff --git a/Test_Numerics/Data/Interpolation/Test_CubicSpline.cs b/Test_Numerics/Data/Interpolation/Test_CubicSpline.cs index 18f1e3b1..51faa09a 100644 --- a/Test_Numerics/Data/Interpolation/Test_CubicSpline.cs +++ b/Test_Numerics/Data/Interpolation/Test_CubicSpline.cs @@ -66,12 +66,12 @@ public void Test_Sequential() values[i - 
1] = i; var spline = new CubicSpline(values, values); var lo = spline.SequentialSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var spline2 = new CubicSpline(values, values, SortOrder.Descending); lo = spline2.SequentialSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -85,12 +85,12 @@ public void Test_Bisection() values[i - 1] = i; var spline = new CubicSpline(values, values); var lo = spline.BisectionSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var spline2 = new CubicSpline(values, values, SortOrder.Descending); lo = spline2.BisectionSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -104,12 +104,12 @@ public void Test_Hunt() values[i - 1] = i; var spline = new CubicSpline(values, values); var lo = spline.HuntSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var spline2 = new CubicSpline(values, values, SortOrder.Descending); lo = spline2.HuntSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -123,7 +123,7 @@ public void Test_Interpolate() var spline = new CubicSpline(XArray, YArray); double X = 8d; double Y = spline.Interpolate(X); - Assert.AreEqual(Y, 11.4049889205445d, 1E-6); + Assert.AreEqual(11.4049889205445d, Y, 1E-6); } /// diff --git a/Test_Numerics/Data/Interpolation/Test_Linear.cs b/Test_Numerics/Data/Interpolation/Test_Linear.cs index f126a074..646c28c3 100644 --- a/Test_Numerics/Data/Interpolation/Test_Linear.cs +++ b/Test_Numerics/Data/Interpolation/Test_Linear.cs @@ -65,12 +65,12 @@ public void Test_Sequential() values[i - 1] = i; var LI = new Linear(values, values); var lo = LI.SequentialSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); LI = new Linear(values, values, SortOrder.Descending); lo = LI.SequentialSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -84,12 +84,12 @@ public void Test_Bisection() values[i - 1] = i; var LI = new Linear(values, values); var lo = LI.BisectionSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); LI = new Linear(values, values, SortOrder.Descending); lo = LI.BisectionSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -103,12 +103,12 @@ public void Test_Hunt() values[i - 1] = i; var LI = new Linear(values, values); var lo = LI.HuntSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); LI = new Linear(values, values, SortOrder.Descending); lo = LI.HuntSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -122,7 +122,7 @@ public void Test_Lin() var LI = new Linear(XArray, YArray); double X = 75d; double Y = LI.Interpolate(X); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); } /// @@ -155,15 +155,15 @@ public void Test_Log() var LinLog = new Linear(XArray, YArray) { YTransform = Transform.Logarithmic }; double Y1 = LinLog.Interpolate(X); - Assert.AreEqual(Y1, 141.42135623731d, 1E-6); + Assert.AreEqual(141.42135623731d, Y1, 1E-6); var LogLin = new Linear(XArray, YArray) { XTransform = Transform.Logarithmic }; double Y2 = LogLin.Interpolate(X); - Assert.AreEqual(Y2, 158.496250072116d, 1E-6); + Assert.AreEqual(158.496250072116d, Y2, 1E-6); var LogLog = new Linear(XArray, YArray) { XTransform = Transform.Logarithmic, YTransform = Transform.Logarithmic }; double Y3 = LogLog.Interpolate(X); -
Assert.AreEqual(Y3, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y3, 1E-6); } /// @@ -214,15 +214,15 @@ public void Test_Z() var LinZ = new Linear(XArray, YArray) { YTransform = Transform.NormalZ }; double Y1 = LinZ.Interpolate(X); - Assert.AreEqual(Y1, 0.358762529d, 1E-6); + Assert.AreEqual(0.358762529d, Y1, 1E-6); var ZLin = new Linear(XArray, YArray) { XTransform = Transform.NormalZ }; double Y2 = ZLin.Interpolate(X); - Assert.AreEqual(Y2, 0.362146174d, 1E-6); + Assert.AreEqual(0.362146174d, Y2, 1E-6); var ZZ = new Linear(XArray, YArray) { XTransform = Transform.NormalZ, YTransform = Transform.NormalZ }; double Y3 = ZZ.Interpolate(X); - Assert.AreEqual(Y3, 0.36093855992815d, 1E-6); + Assert.AreEqual(0.36093855992815d, Y3, 1E-6); } /// @@ -274,7 +274,7 @@ public void Test_RevLin() var LI = new Linear(XArray, YArray, SortOrder.Descending); double X = 75d; double Y = LI.Interpolate(X); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); } /// @@ -292,15 +292,15 @@ public void Test_Rev_Log() var LinLog = new Linear(XArray, YArray, SortOrder.Descending) { YTransform = Transform.Logarithmic }; double Y1 = LinLog.Interpolate(X); - Assert.AreEqual(Y1, 141.42135623731d, 1E-6); + Assert.AreEqual(141.42135623731d, Y1, 1E-6); var LogLin = new Linear(XArray, YArray, SortOrder.Descending) { XTransform = Transform.Logarithmic }; double Y2 = LogLin.Interpolate(X); - Assert.AreEqual(Y2, 158.496250072116d, 1E-6); + Assert.AreEqual(158.496250072116d, Y2, 1E-6); var LogLog = new Linear(XArray, YArray, SortOrder.Descending) { XTransform = Transform.Logarithmic, YTransform = Transform.Logarithmic }; double Y3 = LogLog.Interpolate(X); - Assert.AreEqual(Y3, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y3, 1E-6); } /// @@ -318,15 +318,15 @@ public void Test_Rev_Z() var LinZ = new Linear(XArray, YArray, SortOrder.Descending) { YTransform = Transform.NormalZ }; double Y1 = LinZ.Interpolate(X); - Assert.AreEqual(Y1, 0.358762529d, 1E-6); + Assert.AreEqual(0.358762529d, Y1, 1E-6); var ZLin = new Linear(XArray, YArray, SortOrder.Descending) { XTransform = Transform.NormalZ }; double Y2 = ZLin.Interpolate(X); - Assert.AreEqual(Y2, 0.362146174d, 1E-6); + Assert.AreEqual(0.362146174d, Y2, 1E-6); var ZZ = new Linear(XArray, YArray, SortOrder.Descending) { XTransform = Transform.NormalZ, YTransform = Transform.NormalZ }; double Y3 = ZZ.Interpolate(X); - Assert.AreEqual(Y3, 0.36093855992815d, 1E-6); + Assert.AreEqual(0.36093855992815d, Y3, 1E-6); } // ??? 
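The hunks above and below all apply the same correction: in MSTest, Assert.AreEqual takes the expected value first and the actual (computed) value second, optionally followed by a delta tolerance for doubles. Swapping the arguments does not change whether a test passes, but it determines which value a failure message labels as "Expected". A minimal sketch of the convention, assuming MSTest's Microsoft.VisualStudio.TestTools.UnitTesting namespace (the test class and helper below are illustrative, not part of the library):

using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class Test_AssertArgumentOrder
{
    [TestMethod]
    public void Test_ExpectedComesFirst()
    {
        double actual = Interpolate75();   // value produced by the code under test
        // Expected first, actual second, then the tolerance for doubles:
        Assert.AreEqual(150.0d, actual, 1E-6);
        // With the arguments reversed the assertion still passes or fails the
        // same way, but on failure the computed value would be reported under
        // the "Expected" label, which is what this patch corrects throughout.
    }

    // Illustrative stand-in for a call such as new Linear(XArray, YArray).Interpolate(75d).
    private static double Interpolate75() => 150.0d;
}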
diff --git a/Test_Numerics/Data/Interpolation/Test_Polynomial.cs b/Test_Numerics/Data/Interpolation/Test_Polynomial.cs index f9c291fc..be9e710e 100644 --- a/Test_Numerics/Data/Interpolation/Test_Polynomial.cs +++ b/Test_Numerics/Data/Interpolation/Test_Polynomial.cs @@ -66,12 +66,12 @@ public void Test_Sequential() values[i - 1] = i; var poly = new Polynomial(3, values, values); var lo = poly.SequentialSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var poly2 = new Polynomial(3, values, values, SortOrder.Descending); lo = poly2.SequentialSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -85,12 +85,12 @@ public void Test_Bisection() values[i - 1] = i; var poly = new Polynomial(3, values, values); var lo = poly.BisectionSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var poly2 = new Polynomial(3, values, values, SortOrder.Descending); lo = poly2.BisectionSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -104,12 +104,12 @@ public void Test_Hunt() values[i - 1] = i; var poly = new Polynomial(3, values, values); var lo = poly.HuntSearch(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); Array.Reverse(values); var poly2 = new Polynomial(3, values, values, SortOrder.Descending); lo = poly2.HuntSearch(872.5); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } /// @@ -123,7 +123,7 @@ public void Test_Interpolate_Order3() var poly = new Polynomial(3, XArray, YArray); double X = 8d; double Y = poly.Interpolate(X); - Assert.AreEqual(Y, 11.5415808882467, 1E-6); + Assert.AreEqual(11.5415808882467, Y, 1E-6); } /// diff --git a/Test_Numerics/Data/Paired Data/Test_Ordinate.cs b/Test_Numerics/Data/Paired Data/Test_Ordinate.cs index 2bcb3f4d..a3437a3a 100644 --- a/Test_Numerics/Data/Paired Data/Test_Ordinate.cs +++ b/Test_Numerics/Data/Paired Data/Test_Ordinate.cs @@ -71,14 +71,14 @@ public void Test_Construction() var ordinate4 = new Ordinate(double.NaN, 4); Assert.AreEqual(ordinate1, ordinate2); - Assert.AreEqual(ordinate1.X, 2); - Assert.AreEqual(ordinate1.Y, 4); - Assert.AreEqual(ordinate2.X, 2); - Assert.AreEqual(ordinate2.Y, 4); + Assert.AreEqual(2, ordinate1.X); + Assert.AreEqual(4, ordinate1.Y); + Assert.AreEqual(2, ordinate2.X); + Assert.AreEqual(4, ordinate2.Y); - Assert.AreEqual(ordinate1.IsValid, true); - Assert.AreEqual(ordinate3.IsValid, false); - Assert.AreEqual(ordinate4.IsValid, false); + Assert.IsTrue(ordinate1.IsValid); + Assert.IsFalse(ordinate3.IsValid); + Assert.IsFalse(ordinate4.IsValid); Assert.AreNotEqual(ordinate1, ordinate3); diff --git a/Test_Numerics/Data/Paired Data/Test_PairedDataInterpolation.cs b/Test_Numerics/Data/Paired Data/Test_PairedDataInterpolation.cs index 8f198375..5ff08e81 100644 --- a/Test_Numerics/Data/Paired Data/Test_PairedDataInterpolation.cs +++ b/Test_Numerics/Data/Paired Data/Test_PairedDataInterpolation.cs @@ -56,10 +56,10 @@ public void Test_Sequential() opd.Add(new Ordinate(i, i)); // X var lo = opd.SequentialSearchX(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // Y lo = opd.SequentialSearchY(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // DSC opd = new OrderedPairedData(true, SortOrder.Descending, false, SortOrder.Descending); @@ -67,10 +67,10 @@ public void Test_Sequential() opd.Add(new Ordinate(i, i)); // X lo = opd.SequentialSearchX(872.5d); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); // Y lo = opd.SequentialSearchY(872.5d); -
Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } @@ -86,10 +86,10 @@ public void Test_Bisection() opd.Add(new Ordinate(i, i)); // X var lo = opd.BisectionSearchX(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // Y lo = opd.BisectionSearchY(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // DSC opd = new OrderedPairedData(true, SortOrder.Descending, false, SortOrder.Descending); @@ -97,10 +97,10 @@ public void Test_Bisection() opd.Add(new Ordinate(i, i)); // X lo = opd.BisectionSearchX(872.5d); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); // Y lo = opd.BisectionSearchY(872.5d); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } @@ -116,10 +116,10 @@ public void Test_Hunt() opd.Add(new Ordinate(i, i)); // X var lo = opd.HuntSearchX(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // Y lo = opd.HuntSearchY(872.5d); - Assert.AreEqual(lo, 871); + Assert.AreEqual(871, lo); // DSC opd = new OrderedPairedData(true, SortOrder.Descending, false, SortOrder.Descending); @@ -127,10 +127,10 @@ public void Test_Hunt() opd.Add(new Ordinate(i, i)); // X lo = opd.HuntSearchX(872.5d); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); // Y lo = opd.HuntSearchY(872.5d); - Assert.AreEqual(lo, 127); + Assert.AreEqual(127, lo); } @@ -147,7 +147,7 @@ public void Test_Lin() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 75d; double Y = opd.GetYFromX(X); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y); @@ -168,7 +168,7 @@ public void Test_LinLog() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 75d; double Y = opd.GetYFromX(X, Transform.None, Transform.Logarithmic); - Assert.AreEqual(Y, 141.42135623731d, 1E-6); + Assert.AreEqual(141.42135623731d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.None, Transform.Logarithmic); @@ -188,7 +188,7 @@ public void Test_LogLin() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 75d; double Y = opd.GetYFromX(X, Transform.Logarithmic, Transform.None); - Assert.AreEqual(Y, 158.496250072116d, 1E-6); + Assert.AreEqual(158.496250072116d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.Logarithmic, Transform.None); @@ -208,7 +208,7 @@ public void Test_LogLog() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 75d; double Y = opd.GetYFromX(X, Transform.Logarithmic, Transform.Logarithmic); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.Logarithmic, Transform.Logarithmic); @@ -228,7 +228,7 @@ public void Test_LinZ() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.None, Transform.NormalZ); - Assert.AreEqual(Y, 0.358762529d, 1E-6); + Assert.AreEqual(0.358762529d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.None, Transform.NormalZ); @@ -248,7 +248,7 @@ public void Test_ZLin() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.NormalZ, Transform.None); - Assert.AreEqual(Y, 0.362146174d, 1E-6); + Assert.AreEqual(0.362146174d, Y, 1E-6); // Given Y var xFromY = 
opd.GetXFromY(Y, Transform.NormalZ, Transform.None); @@ -268,7 +268,7 @@ public void Test_ZZ() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.NormalZ, Transform.NormalZ); - Assert.AreEqual(Y, 0.36093855992815d, 1E-6); + Assert.AreEqual(0.36093855992815d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.NormalZ, Transform.NormalZ); @@ -290,7 +290,7 @@ public void Test_RevLinear() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 75d; double Y = opd.GetYFromX(X); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y); @@ -312,7 +312,7 @@ public void Test_RevLinLog() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 75d; double Y = opd.GetYFromX(X, Transform.None, Transform.Logarithmic); - Assert.AreEqual(Y, 141.42135623731d, 1E-6); + Assert.AreEqual(141.42135623731d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.None, Transform.Logarithmic); @@ -334,7 +334,7 @@ public void Test_RevLogLin() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 75d; double Y = opd.GetYFromX(X, Transform.Logarithmic, Transform.None); - Assert.AreEqual(Y, 158.496250072116d, 1E-6); + Assert.AreEqual(158.496250072116d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.Logarithmic, Transform.None); @@ -356,7 +356,7 @@ public void Test_RevLogLog() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 75d; double Y = opd.GetYFromX(X, Transform.Logarithmic, Transform.Logarithmic); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.Logarithmic, Transform.Logarithmic); @@ -378,7 +378,7 @@ public void Test_RevLinZ() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.None, Transform.NormalZ); - Assert.AreEqual(Y, 0.358762529d, 1E-6); + Assert.AreEqual(0.358762529d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.None, Transform.NormalZ); @@ -400,7 +400,7 @@ public void Test_RevZLin() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.NormalZ, Transform.None); - Assert.AreEqual(Y, 0.362146174d, 1E-6); + Assert.AreEqual(0.362146174d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.NormalZ, Transform.None); @@ -422,7 +422,7 @@ public void Test_RevZZ() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Descending, true, SortOrder.Descending); double X = 0.18d; double Y = opd.GetYFromX(X, Transform.NormalZ, Transform.NormalZ); - Assert.AreEqual(Y, 0.36093855992815d, 1E-6); + Assert.AreEqual(0.36093855992815d, Y, 1E-6); // Given Y var xFromY = opd.GetXFromY(Y, Transform.NormalZ, Transform.NormalZ); @@ -442,7 +442,7 @@ public void Test_Lin_List() var opd = new OrderedPairedData(XArray, YArray, true, SortOrder.Ascending, true, SortOrder.Ascending); double X = 75d; double Y = opd.GetYFromX(X); - Assert.AreEqual(Y, 150.0d, 1E-6); + Assert.AreEqual(150.0d, Y, 1E-6); var yVals = opd.GetYFromX(XArray); for (int i = 1; i < YArray.Length; i++) diff --git 
a/Test_Numerics/Data/Statistics/Test_BoxCox.cs b/Test_Numerics/Data/Statistics/Test_BoxCox.cs index 6c0ce5a6..2bcfbfa8 100644 --- a/Test_Numerics/Data/Statistics/Test_BoxCox.cs +++ b/Test_Numerics/Data/Statistics/Test_BoxCox.cs @@ -66,7 +66,7 @@ public void Test_Fit() var sample = new[] { 142.25d, 141.23d, 141.33d, 140.82d, 141.31d, 140.58d, 141.58d, 142.15d, 143.07d, 142.85d, 143.17d, 142.54d, 143.07d, 142.26d, 142.97d, 143.86d, 142.57d, 142.19d, 142.35d, 142.63d, 144.15d, 144.73d, 144.7d, 144.97d, 145.12d, 144.78d, 145.06d, 143.94d, 143.77d, 144.8d, 145.67d, 145.44d, 145.56d, 145.61d, 146.05d, 145.74d, 145.83d, 143.88d, 140.39d, 139.34d, 140.05d, 137.93d, 138.78d, 139.59d, 140.54d, 141.31d, 140.42d, 140.18d, 138.43d, 138.97d, 139.31d, 139.26d, 140.08d, 141.1d, 143.48d, 143.28d, 143.5d, 143.12d, 142.14d, 142.54d, 142.24d, 142.16d, 142.97d, 143.69d, 143.67d, 144.65d, 144.33d, 144.82d, 143.74d, 144.9d, 145.83d, 146.97d, 146.6d, 146.55d, 148.22d, 148.37d, 148.23d, 148.73d, 149.49d, 149.09d, 149.64d, 148.42d, 148.9d, 149.97d, 150.75d, 150.88d, 150.58d, 150.64d, 150.73d, 149.75d, 150.86d, 150.7d, 150.8d, 151.38d, 152.01d, 152.58d, 152.7d, 152.95d, 152.53d, 151.5d, 151.94d, 151.46d, 153.67d, 153.88d, 153.54d, 153.74d, 152.86d, 151.56d, 149.58d, 150.93d, 150.67d, 150.5d, 152.06d, 153.14d, 153.38d, 152.55d, 153.58d, 151.08d, 151.52d, 150.24d, 150.21d, 148.13d, 150.38d, 150.9d, 150.87d, 152.18d, 152.4d, 152.38d, 153.16d, 152.29d, 150.75d, 152.37d, 154.57d, 154.99d, 154.93d, 154.23d, 155.2d, 154.89d, 154.18d, 153.12d, 152.02d, 150.19d, 148.21d, 145.93d, 148.33d, 145.18d, 146.76d, 147.28d, 144.21d, 145.94d, 148.41d, 147.43d, 144.39d, 146.5d, 145.7d, 142.72d, 139.79d, 145.5d, 145.17d, 144.6d, 146.01d, 147.34d, 146.48d, 147.85d, 146.16d, 144.37d, 145.45d, 147.65d, 147.45d, 148.2d, 147.95d, 146.48d, 146.52d, 146.24d, 147.29d, 148.55d, 147.96d, 148.31d, 148.83d, 153.41d, 153.34d, 152.71d, 152.42d, 150.81d, 152.25d, 152.91d, 152.85d, 152.6d, 154.61d, 153.81d, 154.11d, 155.03d, 155.39d, 155.6d, 156.04d, 156.93d, 155.46d, 156.27d, 154.41d, 154.98d }; double l1 = 0d; BoxCox.FitLambda(sample, out l1); - Assert.AreEqual(l1, 1.670035d, 1E-4); + Assert.AreEqual(1.670035d, l1, 1E-4); } /// diff --git a/Test_Numerics/Data/Statistics/Test_HypothesisTests.cs b/Test_Numerics/Data/Statistics/Test_HypothesisTests.cs index c8e90605..bd37842b 100644 --- a/Test_Numerics/Data/Statistics/Test_HypothesisTests.cs +++ b/Test_Numerics/Data/Statistics/Test_HypothesisTests.cs @@ -80,7 +80,7 @@ public void Test_OneSampleTtest() Assert.AreEqual(p, true_p, 1E-4); var t = HypothesisTests.OneSampleTtest(new double[] { 23, 15, -5, 7, 1, -10, 12, -8, 20, 8, -2, -5 }); - Assert.AreEqual(t, 0.087585 * 2, 1E-6); + Assert.AreEqual(0.087585 * 2, t, 1E-6); } /// @@ -187,7 +187,7 @@ public void Test_JarqueBera() // known example var JB = HypothesisTests.JarqueBeraTest(new double[] { 4, 5, 5, 6, 9, 12, 13, 14, 14, 19, 22, 24, 25 }); - Assert.AreEqual(JB, 0.592128, 1E-6); + Assert.AreEqual(0.592128, JB, 1E-6); } diff --git a/Test_Numerics/Distributions/Multivariate/Test_MultivariateNormal.cs b/Test_Numerics/Distributions/Multivariate/Test_MultivariateNormal.cs index 98c1bb06..de4cced4 100644 --- a/Test_Numerics/Distributions/Multivariate/Test_MultivariateNormal.cs +++ b/Test_Numerics/Distributions/Multivariate/Test_MultivariateNormal.cs @@ -145,47 +145,47 @@ public void Test_MultivariateNormalCDF_R() // AB var p = mvn.CDF(new[] { Normal.StandardZ(0.25), Normal.StandardZ(0.35), double.PositiveInfinity, double.PositiveInfinity }); - 
Assert.AreEqual(p, 0.05011069, 1E-4); + Assert.AreEqual(0.05011069, p, 1E-4); // AC p = mvn.CDF(new[] { Normal.StandardZ(0.25), double.PositiveInfinity, Normal.StandardZ(0.5), double.PositiveInfinity }); - Assert.AreEqual(p, 0.0827451, 1E-4); + Assert.AreEqual(0.0827451, p, 1E-4); // AD p = mvn.CDF(new[] { Normal.StandardZ(0.25), double.PositiveInfinity, double.PositiveInfinity, Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.0827451, 1E-4); + Assert.AreEqual(0.0827451, p, 1E-4); // BC p = mvn.CDF(new[] { double.PositiveInfinity, Normal.StandardZ(0.35), Normal.StandardZ(0.5), double.PositiveInfinity }); - Assert.AreEqual(p, 0.1254504, 1E-4); + Assert.AreEqual(0.1254504, p, 1E-4); // BD p = mvn.CDF(new[] { double.PositiveInfinity, Normal.StandardZ(0.35), double.PositiveInfinity, Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.1254504, 1E-4); + Assert.AreEqual(0.1254504, p, 1E-4); // CD p = mvn.CDF(new[] { double.PositiveInfinity, double.PositiveInfinity, Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.1964756, 1E-4); + Assert.AreEqual(0.1964756, p, 1E-4); // ABC p = mvn.CDF(new[] { Normal.StandardZ(0.25), Normal.StandardZ(0.35), Normal.StandardZ(0.5), double.PositiveInfinity }); - Assert.AreEqual(p, 0.005960125, 1E-4); + Assert.AreEqual(0.005960125, p, 1E-4); // ABD p = mvn.CDF(new[] { Normal.StandardZ(0.25), Normal.StandardZ(0.35), double.PositiveInfinity, Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.005964513, 1E-4); + Assert.AreEqual(0.005964513, p, 1E-4); // ACD p = mvn.CDF(new[] { Normal.StandardZ(0.25), double.PositiveInfinity, Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.0128066, 1E-4); + Assert.AreEqual(0.0128066, p, 1E-4); // BCD p = mvn.CDF(new[] { double.PositiveInfinity, Normal.StandardZ(0.35), Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.02324389, 1E-4); + Assert.AreEqual(0.02324389, p, 1E-4); // ABCD p = mvn.CDF(new[] { Normal.StandardZ(0.25), Normal.StandardZ(0.35), Normal.StandardZ(0.5), Normal.StandardZ(0.5)}); - Assert.AreEqual(p, 3.593582e-13, 1E-4); + Assert.AreEqual(3.593582e-13, p, 1E-4); } /// @@ -206,7 +206,7 @@ public void Test_MultivariateNormalCDF_R_PerfectNegative() var mvn = new MultivariateNormal(mean, covar) { MVNUNI = new MersenneTwister(12345) }; var p = mvn.CDF(new[] { Normal.StandardZ(0.5), Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.002740932, 1E-4); + Assert.AreEqual(0.002740932, p, 1E-4); } /// @@ -226,7 +226,7 @@ public void Test_MultivariateNormalCDF_R_PerfectPositive() var mvn = new MultivariateNormal(mean, covar) { MVNUNI = new MersenneTwister(12345) }; var p = mvn.CDF(new[] { Normal.StandardZ(0.5), Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.4661416, 1E-4); + Assert.AreEqual(0.4661416, p, 1E-4); } @@ -246,7 +246,7 @@ public void Test_MultivariateNormalCDF_R_Independent() var mvn = new MultivariateNormal(mean, covar) { MVNUNI = new MersenneTwister(12345) }; var p = mvn.CDF(new[] { Normal.StandardZ(0.5), Normal.StandardZ(0.5), Normal.StandardZ(0.5) }); - Assert.AreEqual(p, 0.125, 1E-4); + Assert.AreEqual(0.125, p, 1E-4); } diff --git a/Test_Numerics/Distributions/Univariate/Test_ChiSquared.cs b/Test_Numerics/Distributions/Univariate/Test_ChiSquared.cs index 181b8ed8..5f626d9f 100644 --- a/Test_Numerics/Distributions/Univariate/Test_ChiSquared.cs +++ b/Test_Numerics/Distributions/Univariate/Test_ChiSquared.cs @@ -276,12 +276,12 @@ public void Test_InverseCDF() { var x = new ChiSquared(1); Assert.AreEqual(0.09999, 
x.InverseCDF(0.24817036595415071751), 1e-04); - Assert.AreEqual(1, x.InverseCDF(0.68268949213708589717), 1e-04); + Assert.AreEqual(1, x.InverseCDF(0.68268949213708589717), 1e-04); Assert.AreEqual(5.5, x.InverseCDF(0.9809835), 1e-04); var x2 = new ChiSquared(2); - Assert.AreEqual(0, x2.InverseCDF(0)); - Assert.AreEqual(0.1, x2.InverseCDF(0.04877057), 1e-04); + Assert.AreEqual(0, x2.InverseCDF(0)); + Assert.AreEqual(0.1, x2.InverseCDF(0.04877057), 1e-04); Assert.AreEqual(1, x2.InverseCDF(0.3934693),1e-04); Assert.AreEqual(5.5, x2.InverseCDF(0.9360721), 1e-04); } diff --git a/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs b/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs index e26b6256..4406083c 100644 --- a/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs +++ b/Test_Numerics/Distributions/Univariate/Test_EmpiricalDistribution.cs @@ -270,8 +270,8 @@ public void Test_ConvolveFiveDistributions() Assert.AreEqual(convolved.Mean, convolved.Median, 0.5, "Median should be close to mean for symmetric distribution"); // Verify CDF properties - Assert.IsLessThanOrEqualTo( 0.01, convolved.CDF(convolved.Minimum) ); - Assert.IsGreaterThanOrEqualTo(0.99, convolved.CDF(convolved.Maximum)); + Assert.IsLessThanOrEqualTo(0.01, convolved.CDF(convolved.Minimum), "CDF at minimum should be close to 0"); + Assert.IsGreaterThanOrEqualTo(0.99, convolved.CDF(convolved.Maximum), "CDF at maximum should be close to 1"); } /// @@ -317,8 +317,8 @@ public void Test_ConvolveFiveDifferentDistributions() double meanError = Math.Abs(convolved.Mean - expectedMean) / expectedMean; double stdDevError = Math.Abs(convolved.StandardDeviation - expectedStdDev) / expectedStdDev; - Assert.IsLessThan(0.05, meanError); - Assert.IsLessThan(0.15, stdDevError); + Assert.IsLessThan(0.05, meanError, $"Mean error {meanError:P2} should be less than 5%"); + Assert.IsLessThan(0.15, stdDevError, $"StdDev error {stdDevError:P2} should be less than 15%"); // Verify range is reasonable double expectedMin = distributions.Sum(d => d.Minimum); diff --git a/Test_Numerics/Distributions/Univariate/Test_Exponential.cs b/Test_Numerics/Distributions/Univariate/Test_Exponential.cs index 36a32108..4e2ca62f 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Exponential.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Exponential.cs @@ -147,7 +147,7 @@ public void Test_EXP_Quantile() var EXP = new Exponential(27421d, 25200d); double q100 = EXP.InverseCDF(0.99d); double true_100 = 143471d; - Assert.IsLessThan(0.01d, (q100 - true_100) / true_100 ); + Assert.IsLessThan(0.01d, (q100 - true_100) / true_100); double p = EXP.CDF(q100); double true_p = 0.99d; Assert.AreEqual(p, true_p); @@ -172,7 +172,7 @@ public void Test_EXP_StandardError() var EXP = new Exponential(27421d, 25200d); double se100 = Math.Sqrt(EXP.QuantileVariance(0.99d, 85, ParameterEstimationMethod.MethodOfMoments)); double true_se100 = 15986d; - Assert.IsLessThan(0.01d, (se100 - true_se100) / true_se100); + Assert.IsLessThan(0.01d, (se100 - true_se100) / true_se100); // Maximum Likelihood EXP = new Exponential(12629d, 39991d); @@ -203,11 +203,11 @@ public void Test_EXP_Partials() public void Test_Construction() { var EXP = new Exponential(-5, 100); - Assert.AreEqual(-5, EXP.Xi); + Assert.AreEqual(-5, EXP.Xi); Assert.AreEqual(100,EXP.Alpha); var EXP2 = new Exponential(0, 1); - Assert.AreEqual(0, EXP2.Xi); + Assert.AreEqual(0, EXP2.Xi); Assert.AreEqual(1, EXP2.Alpha); } @@ -237,7 +237,7 @@ public void
Test_ParametersToString() { var EXP = new Exponential(1, 1); - Assert.AreEqual("Location (ξ)",EXP.ParametersToString[0, 0]); + Assert.AreEqual("Location (ξ)", EXP.ParametersToString[0, 0]); Assert.AreEqual("Scale (α)", EXP.ParametersToString[1, 0]); Assert.AreEqual("1", EXP.ParametersToString[0, 1]); Assert.AreEqual("1", EXP.ParametersToString[1, 1]); @@ -349,7 +349,7 @@ public void Test_Minimum() public void Test_Maximum() { var EXP = new Exponential(0, 1); - Assert.AreEqual(double.PositiveInfinity, EXP.Maximum); + Assert.AreEqual(double.PositiveInfinity, EXP.Maximum); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_GammaDistribution.cs b/Test_Numerics/Distributions/Univariate/Test_GammaDistribution.cs index 1920df3c..eb90c7a7 100644 --- a/Test_Numerics/Distributions/Univariate/Test_GammaDistribution.cs +++ b/Test_Numerics/Distributions/Univariate/Test_GammaDistribution.cs @@ -75,8 +75,8 @@ public void Test_GammaDist_MOM() double lambda = G.Kappa; double trueA = 0.08317d; double trueL = 15.91188d; - Assert.IsLessThan(0.01d, (alpha - trueA) / trueA); - Assert.IsLessThan(0.01d, (lambda - trueL) / trueL); + Assert.IsLessThan(0.01d, (alpha - trueA) / trueA); + Assert.IsLessThan(0.01d, (lambda - trueL) / trueL); } [TestMethod()] @@ -93,9 +93,9 @@ public void Test_GammaDist_LMOM_Fit() Assert.AreEqual(scale, true_scale, 0.0001d); Assert.AreEqual(shape, true_shape, 0.0001d); var lmom = G.LinearMomentsFromParameters(G.GetParameters); - Assert.AreEqual(9.9575163d, lmom[0], 0.0001d); + Assert.AreEqual(9.9575163d, lmom[0], 0.0001d); Assert.AreEqual(1.9822363d, lmom[1], 0.0001d); - Assert.AreEqual(0.1175059d, lmom[2], 0.0001d); + Assert.AreEqual(0.1175059d, lmom[2], 0.0001d); Assert.AreEqual(0.1268391d, lmom[3], 0.0001d); } @@ -143,7 +143,7 @@ public void Test_GammaDist_Quantile() Assert.IsLessThan(0.01d, (q1000 - true_1000) / true_1000); double p = G.CDF(q1000); double true_p = 0.99d; - Assert.AreEqual(p,true_p); + Assert.AreEqual(true_p, p); } /// @@ -182,7 +182,7 @@ public void Test_Construction() { var G = new GammaDistribution(2, 10); Assert.AreEqual(2,G.Theta); - Assert.AreEqual(10, G.Kappa); + Assert.AreEqual(10, G.Kappa); var G2 = new GammaDistribution(-1, 4); Assert.AreEqual(-1,G2.Theta); @@ -197,10 +197,10 @@ public void Test_Construction() public void Test_Rate() { var G = new GammaDistribution(2, 2); - Assert.AreEqual(0.5,G.Rate); + Assert.AreEqual(0.5, G.Rate); var G2 = new GammaDistribution(); - Assert.AreEqual(0.1,G2.Rate); + Assert.AreEqual(0.1, G2.Rate); } /// @@ -210,10 +210,10 @@ public void Test_Rate() public void Test_ParametersToString() { var G = new GammaDistribution(); - Assert.AreEqual("Scale (θ)", G.ParametersToString[0, 0]); - Assert.AreEqual("Shape (κ)",G.ParametersToString[1, 0]); - Assert.AreEqual("10",G.ParametersToString[0, 1]); - Assert.AreEqual("2",G.ParametersToString[1, 1]); + Assert.AreEqual("Scale (θ)", G.ParametersToString[0, 0]); + Assert.AreEqual("Shape (κ)", G.ParametersToString[1, 0]); + Assert.AreEqual("10", G.ParametersToString[0, 1]); + Assert.AreEqual("2", G.ParametersToString[1, 1]); } /// @@ -256,7 +256,7 @@ public void Test_Moments() public void Test_Mean() { var G = new GammaDistribution(); - Assert.AreEqual(20,G.Mean); + Assert.AreEqual(20, G.Mean); } /// @@ -302,7 +302,7 @@ public void Test_StandardDeviation() public void Test_Skewness() { var G = new GammaDistribution(); - Assert.AreEqual(1.4142135, G.Skewness, 1e-04); + Assert.AreEqual(1.4142135, G.Skewness, 1e-04); var G2 = new GammaDistribution(10, 100); Assert.AreEqual(0.2,
G2.Skewness); @@ -321,7 +321,7 @@ public void Test_Kurtosis() Assert.AreEqual(4, G2.Kurtosis); var G3 = new GammaDistribution(10, 2.5); - Assert.AreEqual(5.4,G3.Kurtosis); + Assert.AreEqual(5.4, G3.Kurtosis); } /// @@ -331,7 +331,7 @@ public void Test_Kurtosis() public void Test_Minimum() { var G = new GammaDistribution(); - Assert.AreEqual(0,G.Minimum); + Assert.AreEqual(0, G.Minimum); } /// @@ -341,7 +341,7 @@ public void Test_Minimum() public void Test_Maximum() { var G = new GammaDistribution(); - Assert.AreEqual(double.PositiveInfinity,G.Maximum); + Assert.AreEqual(double.PositiveInfinity, G.Maximum); } /// @@ -364,8 +364,8 @@ public void ValidateMLE_NR() double lambda = G.Kappa; double trueA = 0.08833d; double trueL = 16.89937d; - Assert.IsLessThan( 0.2d, (alpha - trueA) / trueA ); - Assert.IsLessThan(0.01d, (lambda - trueL) / trueL); + Assert.IsLessThan(0.2d, (alpha - trueA) / trueA); + Assert.IsLessThan(0.01d, (lambda - trueL) / trueL); } /// @@ -400,10 +400,10 @@ public void Test_PDF() { var G = new GammaDistribution(10,1); Assert.AreEqual(0.090483, G.PDF(1), 1e-04); - Assert.AreEqual(0.036787, G.PDF(10), 1e-04); + Assert.AreEqual(0.036787, G.PDF(10), 1e-04); var G2 = new GammaDistribution(1,1); - Assert.AreEqual(0.367879, G2.PDF(1), 1e-04); + Assert.AreEqual(0.367879, G2.PDF(1), 1e-04); Assert.AreEqual(0.0000453999, G2.PDF(10), 1e-10); } @@ -418,7 +418,7 @@ public void Test_CDF() Assert.AreEqual(0.63212, G.CDF(10), 1e-04); var G2 = new GammaDistribution(1, 1); - Assert.AreEqual(0.999954, G2.CDF(10), 1e-04); + Assert.AreEqual(0.999954, G2.CDF(10), 1e-04); var G3 = new GammaDistribution(0.1, 10); Assert.AreEqual(0.54207028, G3.CDF(1), 1e-04); diff --git a/Test_Numerics/Distributions/Univariate/Test_GeneralizedBeta.cs b/Test_Numerics/Distributions/Univariate/Test_GeneralizedBeta.cs index b6ba675c..edb5db03 100644 --- a/Test_Numerics/Distributions/Univariate/Test_GeneralizedBeta.cs +++ b/Test_Numerics/Distributions/Univariate/Test_GeneralizedBeta.cs @@ -201,10 +201,10 @@ public void Test_Moments() public void Test_Mean() { var b = new GeneralizedBeta(2, 2, 0, 1); - Assert.AreEqual(0.5, b.Mean); + Assert.AreEqual(0.5, b.Mean); var b2 = new GeneralizedBeta(2, 2, -10, 10); - Assert.AreEqual(0, b2.Mean); + Assert.AreEqual(0, b2.Mean); } /// @@ -224,10 +224,10 @@ public void Test_Median() public void Test_Mode() { var b = new GeneralizedBeta(); - Assert.AreEqual(0.5, b.Mode); + Assert.AreEqual(0.5, b.Mode); var b2 = new GeneralizedBeta(2, 2, -10, 10); - Assert.AreEqual(0, b2.Mode); + Assert.AreEqual(0, b2.Mode); } /// @@ -240,7 +240,7 @@ public void Test_StandardDeviation() Assert.AreEqual(0.223606, b.StandardDeviation, 1e-04); var b2 = new GeneralizedBeta(2, 2, -10, 10); - Assert.AreEqual(4.47213, b2.StandardDeviation, 1e-04); + Assert.AreEqual(4.47213, b2.StandardDeviation, 1e-04); } /// @@ -250,7 +250,7 @@ public void Test_StandardDeviation() public void Test_Skewness() { var b = new GeneralizedBeta(); - Assert.AreEqual(0, b.Skewness); + Assert.AreEqual(0, b.Skewness); var b2 = new GeneralizedBeta(2, 10); Assert.AreEqual(0.92140088, b2.Skewness, 1e-04); @@ -279,12 +279,12 @@ public void Test_Kurtosis() public void Test_MinimumMaximum() { var b = new GeneralizedBeta(); - Assert.AreEqual(0, b.Minimum); - Assert.AreEqual(1, b.Maximum); + Assert.AreEqual(0, b.Minimum); + Assert.AreEqual(1, b.Maximum); var b2 = new GeneralizedBeta(2, 2, -10, 10); - Assert.AreEqual(-10, b2.Minimum); - Assert.AreEqual(10, b2.Maximum); + Assert.AreEqual(-10, b2.Minimum); + Assert.AreEqual(10, b2.Maximum); } /// @@ -314,7
+314,7 @@ public void Test_CDF() { var b = new GeneralizedBeta(2,2,-10,10); Assert.AreEqual(0,b.CDF(-11)); - Assert.AreEqual(1, b.CDF(11)); + Assert.AreEqual(1, b.CDF(11)); var b2 = new GeneralizedBeta(9, 1); Assert.AreEqual(0, b2.CDF(0)); diff --git a/Test_Numerics/Distributions/Univariate/Test_GeneralizedExtremeValue.cs b/Test_Numerics/Distributions/Univariate/Test_GeneralizedExtremeValue.cs index 2632de48..ca9f0ce3 100644 --- a/Test_Numerics/Distributions/Univariate/Test_GeneralizedExtremeValue.cs +++ b/Test_Numerics/Distributions/Univariate/Test_GeneralizedExtremeValue.cs @@ -205,7 +205,7 @@ public void Test_GEV_StandardError() var covar = GEV.ParameterCovariance(sample.Length, ParameterEstimationMethod.MaximumLikelihood); double qVar = GEV.QuantileVariance(0.99d, sample.Length, ParameterEstimationMethod.MaximumLikelihood); double qSigma = Math.Sqrt(qVar); - Assert.IsLessThan( 0.01d, (partials[0] - true_dXdU) / true_dXdU ); + Assert.IsLessThan(0.01d, (partials[0] - true_dXdU) / true_dXdU); Assert.IsLessThan(0.01d, (partials[1] - true_dxdA) / true_dxdA); Assert.IsLessThan(0.01d, (partials[2] - true_dxdK) / true_dxdK); Assert.IsLessThan(0.01d, (covar[0, 0] - true_VarU) / true_VarU); @@ -333,7 +333,7 @@ public void Test_StandardDeviation() Assert.AreEqual(12.825498, GEV.StandardDeviation, 1e-05); var GEV2 = new GeneralizedExtremeValue(100, 10, 0.49); - Assert.AreEqual(9.280898, GEV2.StandardDeviation, 1e-04); + Assert.AreEqual(9.280898, GEV2.StandardDeviation, 1e-04); var GEV3 = new GeneralizedExtremeValue(100, 10, 1); Assert.AreEqual(double.NaN, GEV3.StandardDeviation); @@ -346,13 +346,13 @@ public void Test_Skewness() { var GEV = new GeneralizedExtremeValue(); - Assert.AreEqual(1.1396,GEV.Skewness ); + Assert.AreEqual(1.1396, GEV.Skewness); var GEV2 = new GeneralizedExtremeValue(100, 10, 0.3); Assert.AreEqual(-0.0690175, GEV2.Skewness, 1e-03); var GEV3 = new GeneralizedExtremeValue(100, 10, 1); - Assert.AreEqual(double.NaN,GEV3.Skewness); + Assert.AreEqual(double.NaN, GEV3.Skewness); } /// @@ -378,7 +378,7 @@ public void Test_Kurtosis() public void Test_Minimum() { var GEV = new GeneralizedExtremeValue(); - Assert.AreEqual(double.NegativeInfinity,GEV.Minimum); + Assert.AreEqual(double.NegativeInfinity, GEV.Minimum); var GEV2 = new GeneralizedExtremeValue(100, 10, -5); Assert.AreEqual(98, GEV2.Minimum); diff --git a/Test_Numerics/Distributions/Univariate/Test_GeneralizedLogistic.cs b/Test_Numerics/Distributions/Univariate/Test_GeneralizedLogistic.cs index 224c58d0..d5560ded 100644 --- a/Test_Numerics/Distributions/Univariate/Test_GeneralizedLogistic.cs +++ b/Test_Numerics/Distributions/Univariate/Test_GeneralizedLogistic.cs @@ -117,9 +117,9 @@ public void Test_GLO_LMOM_Fit() Assert.AreEqual(k, true_k, 0.001d); var lmom = GLO.LinearMomentsFromParameters(GLO.GetParameters); Assert.AreEqual(1648.806d, lmom[0], 0.001d); - Assert.AreEqual(138.2366d, lmom[1], 0.001d); - Assert.AreEqual(0.1033903d, lmom[2], 0.001d); - Assert.AreEqual(0.1755746d, lmom[3], 0.001d); + Assert.AreEqual(138.2366d, lmom[1], 0.001d); + Assert.AreEqual(0.1033903d, lmom[2], 0.001d); + Assert.AreEqual(0.1755746d, lmom[3], 0.001d); } /// @@ -169,7 +169,7 @@ public void Test_GLO_Quantile() Assert.IsLessThan(0.01d, (q100 - true_100) / true_100); double p = GLO.CDF(q100); double true_p = 0.99d; - Assert.AreEqual(p,true_p); + Assert.AreEqual(true_p, p); } /// @@ -199,7 +199,7 @@ public void Test_Construction() var l = new GeneralizedLogistic(); Assert.AreEqual(100,l.Xi);
Assert.AreEqual(10,l.Alpha); - Assert.AreEqual(0, l.Kappa); + Assert.AreEqual(0, l.Kappa); var l2 = new GeneralizedLogistic(-100, 10, 1); Assert.AreEqual(-100,l2.Xi); @@ -323,7 +323,7 @@ public void Test_Skewness() Assert.AreEqual(-10.90354, l2.Skewness, 1e-04); var l3 = new GeneralizedLogistic(100, 10, 1); - Assert.AreEqual(double.NaN,l3.Skewness); + Assert.AreEqual(double.NaN, l3.Skewness); } /// @@ -404,7 +404,7 @@ public void Test_CDF() public void Test_InverseCDF() { var l = new GeneralizedLogistic(); - Assert.AreEqual(double.NegativeInfinity,l.InverseCDF(0) ); + Assert.AreEqual(double.NegativeInfinity, l.InverseCDF(0)); Assert.AreEqual(100, l.InverseCDF(0.5)); Assert.AreEqual(double.PositiveInfinity, l.InverseCDF(1)); diff --git a/Test_Numerics/Distributions/Univariate/Test_GeneralizedPareto.cs b/Test_Numerics/Distributions/Univariate/Test_GeneralizedPareto.cs index 53d8227b..501e3edc 100644 --- a/Test_Numerics/Distributions/Univariate/Test_GeneralizedPareto.cs +++ b/Test_Numerics/Distributions/Univariate/Test_GeneralizedPareto.cs @@ -110,8 +110,8 @@ public void Test_GPA_LMOM_Fit() var lmom = GPA.LinearMomentsFromParameters(GPA.GetParameters); Assert.AreEqual(1648.806d, lmom[0], 0.001d); Assert.AreEqual(138.2366d, lmom[1], 0.001d); - Assert.AreEqual(0.1033903d, lmom[2], 0.001d); - Assert.AreEqual(0.03073215d, lmom[3], 0.001d); + Assert.AreEqual(0.1033903d, lmom[2], 0.001d); + Assert.AreEqual(0.03073215d, lmom[3], 0.001d); } /// @@ -312,7 +312,7 @@ public void Test_Mean() Assert.AreEqual(105.26315, GPA2.Mean, 1e-04); var GPA3 = new GeneralizedPareto(100, 10, 1); - Assert.AreEqual(double.NaN,GPA3.Mean ); + Assert.AreEqual(double.NaN, GPA3.Mean); } /// @@ -335,7 +335,7 @@ public void Test_Median() public void Test_Mode() { var GPA = new GeneralizedPareto(); - Assert.AreEqual(100,GPA.Mode ); + Assert.AreEqual(100, GPA.Mode); var GPA2 = new GeneralizedPareto(100, 10, 1); Assert.AreEqual(95, GPA2.Mode); @@ -396,7 +396,7 @@ public void Test_Kurtosis() public void Test_Minimum() { var GPA = new GeneralizedPareto(); - Assert.AreEqual(100,GPA.Minimum); + Assert.AreEqual(100, GPA.Minimum); } /// @@ -435,7 +435,7 @@ public void Test_CDF() { var GPA = new GeneralizedPareto(); Assert.AreEqual(0, GPA.CDF(100)); - Assert.AreEqual(0, GPA.CDF(0), 1e-04); + Assert.AreEqual(0, GPA.CDF(0), 1e-04); Assert.AreEqual(0.999954, GPA.CDF(200), 1e-06); var GPA2 = new GeneralizedPareto(100, 10, 1); @@ -451,8 +451,8 @@ public void Test_InverseCDF() { var GPA = new GeneralizedPareto(); - Assert.AreEqual(100,GPA.InverseCDF(0)); - Assert.AreEqual(double.PositiveInfinity, GPA.InverseCDF(1)); + Assert.AreEqual(100, GPA.InverseCDF(0)); + Assert.AreEqual(double.PositiveInfinity, GPA.InverseCDF(1)); Assert.AreEqual(106.93147, GPA.InverseCDF(0.5), 1e-04); var GPA2 = new GeneralizedPareto(100, 10, 1); diff --git a/Test_Numerics/Distributions/Univariate/Test_Geometric.cs b/Test_Numerics/Distributions/Univariate/Test_Geometric.cs index 26fbdae3..fe9293c7 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Geometric.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Geometric.cs @@ -91,7 +91,7 @@ public void Test_Construction() { var G = new Geometric(); - Assert.AreEqual(0.5,G.ProbabilityOfSuccess); + Assert.AreEqual(0.5, G.ProbabilityOfSuccess); var G2 = new Geometric(0); Assert.AreEqual(0, G2.ProbabilityOfSuccess); @@ -140,7 +140,7 @@ public void Test_Mean() Assert.AreEqual(1, G.Mean); var G2 = new Geometric(0.3); -
Assert.AreEqual(2.3333, G2.Mean, 1e-04); } /// @@ -239,7 +239,7 @@ public void Test_PDF() var G2 = new Geometric(0.3); Assert.AreEqual(0.3, G2.PDF(0)); - Assert.AreEqual(0.122989, G2.PDF(2.5), 1e-05); + Assert.AreEqual(0.122989, G2.PDF(2.5), 1e-05); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_Gumbel.cs b/Test_Numerics/Distributions/Univariate/Test_Gumbel.cs index 67dfb423..0c0ff506 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Gumbel.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Gumbel.cs @@ -188,7 +188,7 @@ public void Test_Construction() var GUM2 = new Gumbel(-100, 1); Assert.AreEqual(-100,GUM2.Xi); - Assert.AreEqual(1,GUM2.Alpha); + Assert.AreEqual(1, GUM2.Alpha); } /// @@ -257,7 +257,7 @@ public void Test_Median() Assert.AreEqual(103.66512, GUM.Median, 1e-05); var GUM2 = new Gumbel(10, 1); - Assert.AreEqual(10.366512, GUM2.Median, 1e-04); + Assert.AreEqual(10.366512, GUM2.Median, 1e-04); } /// @@ -270,7 +270,7 @@ public void Test_StandardDeviation() Assert.AreEqual(12.82549, GUM.StandardDeviation, 1e-04); var GUM2 = new Gumbel(10, 1); - Assert.AreEqual(1.28254, GUM2.StandardDeviation, 1e-04); + Assert.AreEqual(1.28254, GUM2.StandardDeviation, 1e-04); } /// @@ -336,7 +336,7 @@ public void Test_PDF() public void Test_CDF() { var GUM = new Gumbel(); - Assert.AreEqual(0.36787, GUM.CDF(100), 1e-04); + Assert.AreEqual(0.36787, GUM.CDF(100), 1e-04); Assert.AreEqual(3.5073e-65, GUM.CDF(50), 1e-68); Assert.AreEqual(0,GUM.CDF(-10)); diff --git a/Test_Numerics/Distributions/Univariate/Test_InverseChiSquared.cs b/Test_Numerics/Distributions/Univariate/Test_InverseChiSquared.cs index 73f86cf1..ee0651a9 100644 --- a/Test_Numerics/Distributions/Univariate/Test_InverseChiSquared.cs +++ b/Test_Numerics/Distributions/Univariate/Test_InverseChiSquared.cs @@ -216,7 +216,7 @@ public void Test_MinMax() public void Test_PDF() { var IX = new InverseChiSquared(1, 1); - Assert.AreEqual(0.2419, IX.PDF(1),1e-04); + Assert.AreEqual(0.2419, IX.PDF(1), 1e-04); var IX2 = new InverseChiSquared(2, 1); Assert.AreEqual(0.15163, IX2.PDF(2), 1e-04); diff --git a/Test_Numerics/Distributions/Univariate/Test_InverseGamma.cs b/Test_Numerics/Distributions/Univariate/Test_InverseGamma.cs index 47130b7b..eb43498b 100644 --- a/Test_Numerics/Distributions/Univariate/Test_InverseGamma.cs +++ b/Test_Numerics/Distributions/Univariate/Test_InverseGamma.cs @@ -83,7 +83,7 @@ public void Test_Construction() var IG2 = new InverseGamma(2, 4); Assert.AreEqual(2,IG2.Beta); - Assert.AreEqual(4,IG2.Alpha); + Assert.AreEqual(4, IG2.Alpha); } /// @@ -109,10 +109,10 @@ public void Test_InvalidParameters() public void Test_ParametersToString() { var IG = new InverseGamma(); - Assert.AreEqual("Scale (β)",IG.ParametersToString[0, 0]); - Assert.AreEqual("Shape (α)",IG.ParametersToString[1, 0] ); - Assert.AreEqual("0.5",IG.ParametersToString[0, 1] ); - Assert.AreEqual("2",IG.ParametersToString[1, 1]); + Assert.AreEqual("Scale (β)", IG.ParametersToString[0, 0]); + Assert.AreEqual("Shape (α)", IG.ParametersToString[1, 0]); + Assert.AreEqual("0.5", IG.ParametersToString[0, 1]); + Assert.AreEqual("2", IG.ParametersToString[1, 1]); } /// @@ -122,7 +122,7 @@ public void Test_Mean() { var IG = new InverseGamma(); - Assert.AreEqual(0.5,IG.Mean); + Assert.AreEqual(0.5, IG.Mean); var IG2 = new InverseGamma(1, 1); Assert.AreEqual(double.NaN,IG2.Mean); @@ -148,7 +148,7 @@ public void Test_Mode() Assert.AreEqual(0.1666, IG.Mode, 1e-04); var IG2 = new InverseGamma(1, 1); -
Assert.AreEqual(0.5,IG2.Mode); + Assert.AreEqual(0.5, IG2.Mode); } /// @@ -243,7 +243,7 @@ public void Test_InverseCDF() { var IG = new InverseGamma(); Assert.AreEqual(0, IG.InverseCDF(0)); - Assert.AreEqual(double.PositiveInfinity,IG.InverseCDF(1)); + Assert.AreEqual(double.PositiveInfinity, IG.InverseCDF(1)); var IG2 = new InverseGamma(2, 2); Assert.AreEqual(0.81993, IG2.InverseCDF(0.3), 1e-04); diff --git a/Test_Numerics/Distributions/Univariate/Test_LnNormal.cs b/Test_Numerics/Distributions/Univariate/Test_LnNormal.cs index dd7d31ec..30177e06 100644 --- a/Test_Numerics/Distributions/Univariate/Test_LnNormal.cs +++ b/Test_Numerics/Distributions/Univariate/Test_LnNormal.cs @@ -182,7 +182,7 @@ public void Test_Construction() Assert.AreEqual(10, LN.StandardDeviation, 1E-4); var LN2 = new LnNormal(1, 1); - Assert.AreEqual(1, LN2.Mean, 1E-4); + Assert.AreEqual(1, LN2.Mean, 1E-4); Assert.AreEqual(1, LN2.StandardDeviation, 1E-4); } diff --git a/Test_Numerics/Distributions/Univariate/Test_LogNormal.cs b/Test_Numerics/Distributions/Univariate/Test_LogNormal.cs index 36baac9d..39e11410 100644 --- a/Test_Numerics/Distributions/Univariate/Test_LogNormal.cs +++ b/Test_Numerics/Distributions/Univariate/Test_LogNormal.cs @@ -80,8 +80,8 @@ public void Test_LogNormal_MOM_Fit() double u2 = LogN.Sigma; double true_u1 = 10.716952223744224d; double true_u2 = 0.45007398831588075d; - Assert.IsLessThan( 0.01d, (u1 - true_u1) / true_u1 ); - Assert.IsLessThan(0.01d, (u2 - true_u2) / true_u2 ); + Assert.IsLessThan(0.01d, (u1 - true_u1) / true_u1); + Assert.IsLessThan(0.01d, (u2 - true_u2) / true_u2); } /// @@ -102,7 +102,7 @@ public void Test_LogNormal_LMOM_Fit() Assert.AreEqual(u2, true_u2, 0.0001d); var lmom = norm.LinearMomentsFromParameters(norm.GetParameters); Assert.AreEqual(0.96723909d, lmom[0], 0.0001d); - Assert.AreEqual(0.09452119d, lmom[1], 0.0001d); + Assert.AreEqual(0.09452119d, lmom[1], 0.0001d); Assert.AreEqual(0.00000000d, lmom[2], 0.0001d); Assert.AreEqual(0.12260172d, lmom[3], 0.0001d); } @@ -261,7 +261,7 @@ public void Test_PDF() Assert.AreEqual(3.32e-135, LogN.PDF(0.1), 1e-04); var LogN2 = new LogNormal(-0.1, 0.1); - Assert.AreEqual(9.12888e-56, LogN.PDF(0.8), 1e-04); + Assert.AreEqual(9.12888e-56, LogN.PDF(0.8), 1e-04); } /// @@ -274,7 +274,7 @@ public void Test_CDF() Assert.AreEqual(0, LogN.CDF(0.1)); var LogN2 = new LogNormal(1.5, 1.5); - Assert.AreEqual(0.11493, LogN2.CDF(0.5), 1e-05); + Assert.AreEqual(0.11493, LogN2.CDF(0.5), 1e-05); } /// @@ -284,7 +284,7 @@ public void Test_CDF() public void Test_InverseCDF() { var LogN = new LogNormal(2.5, 2.5); - Assert.AreEqual(40183.99248, LogN.InverseCDF(0.8), 1e-04); + Assert.AreEqual(40183.99248, LogN.InverseCDF(0.8), 1e-04); var LogN2 = new LogNormal(1.5, 2.5); Assert.AreEqual(40183.99248, LogN.InverseCDF(0.8), 1e-05); diff --git a/Test_Numerics/Distributions/Univariate/Test_LogPearsonTypeIII.cs b/Test_Numerics/Distributions/Univariate/Test_LogPearsonTypeIII.cs index 8e6f4fac..fd84945e 100644 --- a/Test_Numerics/Distributions/Univariate/Test_LogPearsonTypeIII.cs +++ b/Test_Numerics/Distributions/Univariate/Test_LogPearsonTypeIII.cs @@ -198,7 +198,7 @@ public void Test_LP3_StandardError() LP3 = new LogPearsonTypeIII(2.26878d, 0.10621d, -0.02925d); qVar999 = Math.Sqrt(LP3.QuantileVariance(0.99d, 69, ParameterEstimationMethod.MaximumLikelihood)); true_qVar999 = 25d; - Assert.IsLessThan(0.01d, (qVar999 - true_qVar999) / true_qVar999 ); + Assert.IsLessThan(0.01d, (qVar999 - true_qVar999) / true_qVar999); } @@ -286,13 +286,13 @@ public void 
Test_Mode() public void Test_Minimum() { var LP3 = new LogPearsonTypeIII(); - Assert.AreEqual(0, LP3.Minimum ); + Assert.AreEqual(0, LP3.Minimum); var LP3ii = new LogPearsonTypeIII(1,1,1); Assert.AreEqual(0.1, LP3ii.Minimum, 1e-05); var LP3iii = new LogPearsonTypeIII(1, -1, 1); - Assert.AreEqual(0,LP3iii.Minimum); + Assert.AreEqual(0, LP3iii.Minimum); } /// @@ -329,7 +329,7 @@ public void Test_PDF() public void Test_CDF() { var LP3 = new LogPearsonTypeIII(); - Assert.AreEqual(0,LP3.CDF(-1)); + Assert.AreEqual(0, LP3.CDF(-1)); Assert.AreEqual(9.8658e-10, LP3.CDF(1), 1e-13); } diff --git a/Test_Numerics/Distributions/Univariate/Test_NoncentralT.cs b/Test_Numerics/Distributions/Univariate/Test_NoncentralT.cs index 4cda1a3d..f8e17c82 100644 --- a/Test_Numerics/Distributions/Univariate/Test_NoncentralT.cs +++ b/Test_Numerics/Distributions/Univariate/Test_NoncentralT.cs @@ -161,7 +161,7 @@ public void Test_Mean() Assert.AreEqual(0, t.Mean); var t2 = new NoncentralT(0, 1); - Assert.AreEqual(double.NaN,t2.Mean); + Assert.AreEqual(double.NaN, t2.Mean); } /// @@ -187,7 +187,7 @@ public void Test_Mode() Assert.AreEqual(0, t.Mode, 1E-4); var t3 = new NoncentralT(10, 1); - Assert.AreEqual(0.9329, t3.Mode, 1e-04); + Assert.AreEqual(0.9329, t3.Mode, 1e-04); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_Normal.cs b/Test_Numerics/Distributions/Univariate/Test_Normal.cs index a4984e42..889f4e54 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Normal.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Normal.cs @@ -105,9 +105,9 @@ public void Test_Normal_LMOM_Fit() Assert.AreEqual(u2, true_u2, 0.0001d); var lmom = norm.LinearMomentsFromParameters(norm.GetParameters); Assert.AreEqual(9.9575163d, lmom[0], 0.0001d); - Assert.AreEqual(1.9822411d, lmom[1], 0.0001d); + Assert.AreEqual(1.9822411d, lmom[1], 0.0001d); Assert.AreEqual(0.0000000d, lmom[2], 0.0001d); - Assert.AreEqual(0.1226017d, lmom[3], 0.0001d); + Assert.AreEqual(0.1226017d, lmom[3], 0.0001d); } /// @@ -219,9 +219,9 @@ public void Test_ParametersToString() { var N = new Normal(); Assert.AreEqual("Mean (µ)",N.ParametersToString[0, 0]); - Assert.AreEqual("Std Dev (σ)",N.ParametersToString[1, 0]); - Assert.AreEqual("0",N.ParametersToString[0, 1] ); - Assert.AreEqual("1",N.ParametersToString[1,1]); + Assert.AreEqual("Std Dev (σ)", N.ParametersToString[1, 0]); + Assert.AreEqual("0", N.ParametersToString[0, 1]); + Assert.AreEqual("1", N.ParametersToString[1, 1]); } /// @@ -339,10 +339,10 @@ public void Test_PDF() { var N = new Normal(); Assert.AreEqual(0.39894, N.PDF(0), 1e-04); - Assert.AreEqual(0.24197, N.PDF(1), 1e-04); + Assert.AreEqual(0.24197, N.PDF(1), 1e-04); var N2 = new Normal(5, 9); - Assert.AreEqual(0.03549, N2.PDF(-1), 1e-04); + Assert.AreEqual(0.03549, N2.PDF(-1), 1e-04); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_Pareto.cs b/Test_Numerics/Distributions/Univariate/Test_Pareto.cs index 564b719c..b160758a 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Pareto.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Pareto.cs @@ -250,7 +250,7 @@ public void Test_PDF() Assert.AreEqual(4d / 9d,p.PDF(1.5) ); var p2 = new Pareto(3, 2); - Assert.AreEqual(2d / 3d,p2.PDF(3) ); + Assert.AreEqual(2d / 3d, p2.PDF(3)); Assert.AreEqual(18d / 125d,p2.PDF(5) ); } @@ -273,7 +273,7 @@ public void Test_InverseCDF() { var p = new Pareto(); Assert.AreEqual(1, p.InverseCDF(0)); - Assert.AreEqual(1.0363, p.InverseCDF(0.3), 1e-04); + Assert.AreEqual(1.0363, p.InverseCDF(0.3), 1e-04); } } } diff --git
a/Test_Numerics/Distributions/Univariate/Test_PearsonTypeIII.cs b/Test_Numerics/Distributions/Univariate/Test_PearsonTypeIII.cs index 345cd879..45606a90 100644 --- a/Test_Numerics/Distributions/Univariate/Test_PearsonTypeIII.cs +++ b/Test_Numerics/Distributions/Univariate/Test_PearsonTypeIII.cs @@ -125,9 +125,9 @@ public void Test_P3_LMOM_Fit() Assert.AreEqual(b, true_b, 0.001d); var lmom = P3.LinearMomentsFromParameters(P3.GetParameters); Assert.AreEqual(1648.806d, lmom[0], 0.001d); - Assert.AreEqual(138.2366d, lmom[1], 0.001d); - Assert.AreEqual(0.1033889d, lmom[2], 0.001d); - Assert.AreEqual(0.1258521d, lmom[3], 0.001d); + Assert.AreEqual(138.2366d, lmom[1], 0.001d); + Assert.AreEqual(0.1033889d, lmom[2], 0.001d); + Assert.AreEqual(0.1258521d, lmom[3], 0.001d); } /// @@ -220,7 +220,7 @@ public void Test_P3_StandardError() P3 = new PearsonTypeIII(191.31739d, 47.01925d, 0.61897d); qVar999 = Math.Sqrt(P3.QuantileVariance(0.99d, 69, ParameterEstimationMethod.MaximumLikelihood)); true_qVar999 = 20.045d; - Assert.IsLessThan(0.01d, (qVar999 - true_qVar999) / true_qVar999 ); + Assert.IsLessThan(0.01d, (qVar999 - true_qVar999) / true_qVar999); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_Pert.cs b/Test_Numerics/Distributions/Univariate/Test_Pert.cs index ab69bf80..bbc4ea78 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Pert.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Pert.cs @@ -268,7 +268,7 @@ public void Test_Skewness() public void Test_Kurtosis() { var p = new Pert(); - Assert.AreEqual(2.3333, p.Kurtosis,1e-04); + Assert.AreEqual(2.3333, p.Kurtosis, 1e-04); } /// diff --git a/Test_Numerics/Distributions/Univariate/Test_Poisson.cs b/Test_Numerics/Distributions/Univariate/Test_Poisson.cs index 61f092f3..0f7694b3 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Poisson.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Poisson.cs @@ -159,7 +159,7 @@ public void Test_Mean() public void Test_Median() { var P = new Poisson(); - Assert.AreEqual(1, P.Median, 1E-4); + Assert.AreEqual(1, P.Median, 1E-4); } /// @@ -225,7 +225,7 @@ public void Test_MinMax() Assert.AreEqual(double.PositiveInfinity,P.Maximum); var P2 = new Poisson(4); - Assert.AreEqual(0, P2.Minimum ); + Assert.AreEqual(0, P2.Minimum); Assert.AreEqual(double.PositiveInfinity, P2.Maximum); } @@ -252,7 +252,7 @@ public void Test_CDF() { var P = new Poisson(1.5); Assert.AreEqual(0.55782, P.CDF(1), 1e-04); - Assert.AreEqual(0.999999, P.CDF(10), 1e-06); + Assert.AreEqual(0.999999, P.CDF(10), 1e-06); var P2 = new Poisson(10.8); Assert.AreEqual(0.00024, P2.CDF(1), 1e-05); diff --git a/Test_Numerics/Distributions/Univariate/Test_Rayleigh.cs b/Test_Numerics/Distributions/Univariate/Test_Rayleigh.cs index dce618fa..66b5c9c6 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Rayleigh.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Rayleigh.cs @@ -84,10 +84,10 @@ public void Test_Construction() { var R = new Rayleigh(); - Assert.AreEqual(R.Sigma, 10); + Assert.AreEqual(10, R.Sigma); var R2 = new Rayleigh(2); - Assert.AreEqual(R2.Sigma, 2); + Assert.AreEqual(2, R2.Sigma); } /// @@ -113,8 +113,8 @@ public void Test_InvalidParameters() public void Test_ParametersToString() { var R = new Rayleigh(); - Assert.AreEqual(R.ParametersToString[0, 0], "Scale (σ)"); - Assert.AreEqual(R.ParametersToString[0, 1], "10"); + Assert.AreEqual("Scale (σ)", R.ParametersToString[0, 0]); + Assert.AreEqual("10", R.ParametersToString[0, 1]); } /// @@ -138,10 +138,10 @@ public void
Test_Moments() public void Test_Mean() { var R = new Rayleigh(); - Assert.AreEqual(R.Mean, 12.53314, 1e-04); + Assert.AreEqual(12.53314, R.Mean, 1e-04); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.Mean, 1.25331, 1e-04); + Assert.AreEqual(1.25331, R2.Mean, 1e-04); } /// @@ -151,10 +151,10 @@ public void Test_Mean() public void Test_Median() { var R = new Rayleigh(); - Assert.AreEqual(R.Median, 11.7741, 1e-04); + Assert.AreEqual(11.7741, R.Median, 1e-04); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.Median, 1.1774, 1e-04); + Assert.AreEqual(1.1774, R2.Median, 1e-04); } /// @@ -164,10 +164,10 @@ public void Test_Median() public void Test_Mode() { var R = new Rayleigh(); - Assert.AreEqual(R.Mode, 10); + Assert.AreEqual(10, R.Mode); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.Mode, 1); + Assert.AreEqual(1, R2.Mode); } /// @@ -177,10 +177,10 @@ public void Test_Mode() public void Test_StandardDeviation() { var R = new Rayleigh(); - Assert.AreEqual(R.StandardDeviation, 6.55136, 1e-05); + Assert.AreEqual(6.55136, R.StandardDeviation, 1e-05); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.StandardDeviation, 0.65513, 1e-04); + Assert.AreEqual(0.65513, R2.StandardDeviation, 1e-04); } /// @@ -190,10 +190,10 @@ public void Test_StandardDeviation() public void Test_Skewness() { var R = new Rayleigh(); - Assert.AreEqual(R.Skewness, 0.63111, 1e-04); + Assert.AreEqual(0.63111, R.Skewness, 1e-04); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.Skewness, 0.63111, 1e-04); + Assert.AreEqual(0.63111, R2.Skewness, 1e-04); } /// @@ -203,10 +203,10 @@ public void Test_Skewness() public void Test_Kurtosis() { var R = new Rayleigh(); - Assert.AreEqual(R.Kurtosis, 3.24508,1e-05); + Assert.AreEqual(3.24508, R.Kurtosis, 1e-05); var R2 = new Rayleigh(1); - Assert.AreEqual(R2.Kurtosis, 3.24508,1e-05); + Assert.AreEqual(3.24508, R2.Kurtosis, 1e-05); } /// @@ -216,8 +216,8 @@ public void Test_Kurtosis() public void Test_MinMax() { var R = new Rayleigh(); - Assert.AreEqual(R.Minimum, 0); - Assert.AreEqual(R.Maximum, double.PositiveInfinity); + Assert.AreEqual(0, R.Minimum); + Assert.AreEqual(double.PositiveInfinity, R.Maximum); } /// @@ -227,11 +227,11 @@ public void Test_MinMax() public void Test_PDF() { var R = new Rayleigh(); - Assert.AreEqual(R.PDF(-1), 0); - Assert.AreEqual(R.PDF(1), 9.9501e-03, 1e-06); + Assert.AreEqual(0, R.PDF(-1)); + Assert.AreEqual(9.9501e-03, R.PDF(1), 1e-06); var R2 = new Rayleigh(1); - Assert.AreEqual(R.PDF(2), 0.019603, 1e-05); + Assert.AreEqual(0.019603, R.PDF(2), 1e-05); } /// @@ -241,8 +241,8 @@ public void Test_PDF() public void Test_CDF() { var R = new Rayleigh(); - Assert.AreEqual(R.CDF(-1), 0); - Assert.AreEqual(R.CDF(1), 4.9875e-03,1e-04); + Assert.AreEqual(0, R.CDF(-1)); + Assert.AreEqual(4.9875e-03, R.CDF(1), 1e-04); } /// @@ -252,9 +252,9 @@ public void Test_CDF() public void Test_InverseCDF() { var R = new Rayleigh(); - Assert.AreEqual(R.InverseCDF(0), 0); - Assert.AreEqual(R.InverseCDF(1), double.PositiveInfinity); - Assert.AreEqual(R.InverseCDF(0.4), 10.1076, 1e-04); + Assert.AreEqual(0, R.InverseCDF(0)); + Assert.AreEqual(double.PositiveInfinity, R.InverseCDF(1)); + Assert.AreEqual(10.1076, R.InverseCDF(0.4), 1e-04); } } } diff --git a/Test_Numerics/Distributions/Univariate/Test_StudentT.cs b/Test_Numerics/Distributions/Univariate/Test_StudentT.cs index eddf7d04..e8e66234 100644 --- a/Test_Numerics/Distributions/Univariate/Test_StudentT.cs +++ b/Test_Numerics/Distributions/Univariate/Test_StudentT.cs @@ -111,14 +111,14 @@ public void
Test_Construction() { var t = new StudentT(); - Assert.AreEqual(t.Mu, 0); - Assert.AreEqual(t.Sigma, 1); - Assert.AreEqual(t.DegreesOfFreedom, 10); + Assert.AreEqual(0, t.Mu); + Assert.AreEqual(1, t.Sigma); + Assert.AreEqual(10, t.DegreesOfFreedom); var t2 = new StudentT(10, 10, 10); - Assert.AreEqual(t2.Mu, 10); - Assert.AreEqual(t2.Sigma, 10); - Assert.AreEqual(t2.DegreesOfFreedom, 10); + Assert.AreEqual(10, t2.Mu); + Assert.AreEqual(10, t2.Sigma); + Assert.AreEqual(10, t2.DegreesOfFreedom); } /// @@ -144,12 +144,12 @@ public void Test_InvalidParameters() public void Test_ParametersToString() { var t = new StudentT(); - Assert.AreEqual(t.ParametersToString[0, 0], "Location (µ)"); - Assert.AreEqual(t.ParametersToString[1, 0], "Scale (σ)"); - Assert.AreEqual(t.ParametersToString[2, 0], "Degrees of Freedom (ν)"); - Assert.AreEqual(t.ParametersToString[0, 1], "0"); - Assert.AreEqual(t.ParametersToString[1,1],"1"); - Assert.AreEqual(t.ParametersToString[2, 1], "10"); + Assert.AreEqual("Location (µ)", t.ParametersToString[0, 0]); + Assert.AreEqual("Scale (σ)", t.ParametersToString[1, 0]); + Assert.AreEqual("Degrees of Freedom (ν)", t.ParametersToString[2, 0]); + Assert.AreEqual("0", t.ParametersToString[0, 1]); + Assert.AreEqual("1", t.ParametersToString[1, 1]); + Assert.AreEqual("10", t.ParametersToString[2, 1]); } /// @@ -173,10 +173,10 @@ public void Test_Moments() public void Test_Mean() { var t = new StudentT(); - Assert.AreEqual(t.Mean, 0); + Assert.AreEqual(0, t.Mean); var t2 = new StudentT(1, 1, 1); - Assert.AreEqual(t2.Mean, double.NaN); + Assert.AreEqual(double.NaN, t2.Mean); } /// @@ -186,10 +186,10 @@ public void Test_Mean() public void Test_Median() { var t = new StudentT(); - Assert.AreEqual(t.Median, 0); + Assert.AreEqual(0, t.Median); var t2 = new StudentT(1, 1, 1); - Assert.AreEqual(t2.Median, 1); + Assert.AreEqual(1, t2.Median); } /// @@ -199,10 +199,10 @@ public void Test_Median() public void Test_Mode() { var t = new StudentT(); - Assert.AreEqual(t.Mode, 0); + Assert.AreEqual(0, t.Mode); var t2 = new StudentT(1,1,1); - Assert.AreEqual(t2.Mode, 1); + Assert.AreEqual(1, t2.Mode); } /// @@ -212,13 +212,13 @@ public void Test_Mode() public void Test_StandardDeviation() { var t = new StudentT(); - Assert.AreEqual(t.StandardDeviation, 1.11803, 1e-04); + Assert.AreEqual(1.11803, t.StandardDeviation, 1e-04); var t2 = new StudentT(1, 1, 2); - Assert.AreEqual(t2.StandardDeviation,double.PositiveInfinity); + Assert.AreEqual(double.PositiveInfinity, t2.StandardDeviation); var t3 = new StudentT(1, 1, 1); - Assert.AreEqual(t3.StandardDeviation, double.NaN); + Assert.AreEqual(double.NaN, t3.StandardDeviation); } /// @@ -228,10 +228,10 @@ public void Test_StandardDeviation() public void Test_Skewness() { var t = new StudentT(); - Assert.AreEqual(t.Skewness, 0); + Assert.AreEqual(0, t.Skewness); var t2 = new StudentT(1, 1, 1); - Assert.AreEqual(t2.Skewness, double.NaN); + Assert.AreEqual(double.NaN, t2.Skewness); } /// @@ -241,13 +241,13 @@ public void Test_Skewness() public void Test_Kurtosis() { var t = new StudentT(); - Assert.AreEqual(t.Kurtosis, 4); + Assert.AreEqual(4, t.Kurtosis); var t2 = new StudentT(1, 1, 4); - Assert.AreEqual(t2.Kurtosis, double.PositiveInfinity); + Assert.AreEqual(double.PositiveInfinity, t2.Kurtosis); var t3 = new StudentT(1, 1, 2); - Assert.AreEqual(t3.Kurtosis, double.NaN); + Assert.AreEqual(double.NaN, t3.Kurtosis); } /// @@ -257,8 +257,8 @@ public void Test_Kurtosis() public void Test_MinMax() { var t = new StudentT(); - Assert.AreEqual(t.Minimum,
double.NegativeInfinity); - Assert.AreEqual(t.Maximum, double.PositiveInfinity); + Assert.AreEqual(double.NegativeInfinity, t.Minimum); + Assert.AreEqual(double.PositiveInfinity, t.Maximum); } } } diff --git a/Test_Numerics/Distributions/Univariate/Test_Triangular.cs b/Test_Numerics/Distributions/Univariate/Test_Triangular.cs index 348e38db..49756414 100644 --- a/Test_Numerics/Distributions/Univariate/Test_Triangular.cs +++ b/Test_Numerics/Distributions/Univariate/Test_Triangular.cs @@ -136,14 +136,14 @@ public void Test_Triangular_MLE() public void Test_Construction() { var T = new Triangular(); - Assert.AreEqual(T.Min, 0); - Assert.AreEqual(T.Mode, 0.5); - Assert.AreEqual(T.Max, 1); + Assert.AreEqual(0,T.Min); + Assert.AreEqual(0.5, T.Mode); + Assert.AreEqual(1, T.Max); var T2 = new Triangular(-1,1,2); - Assert.AreEqual(T2.Min, -1); - Assert.AreEqual(T2.Mode, 1); - Assert.AreEqual(T2.Max, 2); + Assert.AreEqual(-1, T2.Min); + Assert.AreEqual(1, T2.Mode); + Assert.AreEqual(2, T2.Max); } /// @@ -172,12 +172,12 @@ public void Test_InvalidParameters() public void Test_ParametersToString() { var T = new Triangular(); - Assert.AreEqual(T.ParametersToString[0, 0], "Min (a)"); - Assert.AreEqual(T.ParametersToString[1, 0], "Most Likely (c)"); - Assert.AreEqual(T.ParametersToString[2, 0], "Max (b)"); - Assert.AreEqual(T.ParametersToString[0, 1], "0"); - Assert.AreEqual(T.ParametersToString[1, 1], "0.5"); - Assert.AreEqual(T.ParametersToString[2, 1], "1"); + Assert.AreEqual("Min (a)",T.ParametersToString[0, 0] ); + Assert.AreEqual("Most Likely (c)",T.ParametersToString[1, 0] ); + Assert.AreEqual("Max (b)",T.ParametersToString[2, 0] ); + Assert.AreEqual("0", T.ParametersToString[0, 1]); + Assert.AreEqual("0.5", T.ParametersToString[1, 1]); + Assert.AreEqual("1", T.ParametersToString[2, 1]); } /// @@ -201,10 +201,10 @@ public void Test_Moments() public void Test_Mean() { var T = new Triangular(); - Assert.AreEqual(T.Mean, 0.5); + Assert.AreEqual(0.5, T.Mean); var T2 = new Triangular(1, 3, 6); - Assert.AreEqual(T2.Mean, 3.3333, 1e-04); + Assert.AreEqual(3.3333, T2.Mean, 1e-04); } /// @@ -214,10 +214,10 @@ public void Test_Mean() public void Test_Median() { var T = new Triangular(); - Assert.AreEqual(T.Median, 0.5); + Assert.AreEqual(0.5, T.Median); var T2 = new Triangular(1,3,6); - Assert.AreEqual(T2.Median, 3.26138, 1e-05); + Assert.AreEqual(3.26138, T2.Median, 1e-05); } /// @@ -227,10 +227,10 @@ public void Test_Median() public void Test_Mode() { var T = new Triangular(); - Assert.AreEqual(T.Mode, 0.5); + Assert.AreEqual(0.5, T.Mode); var T2 = new Triangular(1, 3, 6); - Assert.AreEqual(T2.Mode, 3); + Assert.AreEqual(3, T2.Mode); } /// @@ -240,10 +240,10 @@ public void Test_Mode() public void Test_StandardDeviation() { var T = new Triangular(); - Assert.AreEqual(T.StandardDeviation, 0.20412, 1e-04); + Assert.AreEqual(0.20412, T.StandardDeviation, 1e-04); var T2 = new Triangular(1, 3, 6); - Assert.AreEqual(T2.StandardDeviation, 1.02739, 1e-04); + Assert.AreEqual(1.02739, T2.StandardDeviation, 1e-04); } /// @@ -253,7 +253,7 @@ public void Test_StandardDeviation() public void Test_Skewness() { var T = new Triangular(); - Assert.AreEqual(T.Skewness, 0); + Assert.AreEqual(0, T.Skewness); } /// @@ -263,10 +263,10 @@ public void Test_Skewness() public void Test_Kurtosis() { var T = new Triangular(); - Assert.AreEqual(T.Kurtosis, 12d / 5d); + Assert.AreEqual(12d / 5d, T.Kurtosis); var T2 = new Triangular(1, 3, 6); - Assert.AreEqual(T2.Kurtosis, 12d / 5d); + Assert.AreEqual(12d / 5d, T2.Kurtosis); } /// @@ 
@@ -276,12 +276,12 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var T = new Triangular();
- Assert.AreEqual(T.Minimum, 0);
- Assert.AreEqual(T.Maximum, 1);
+ Assert.AreEqual(0, T.Minimum);
+ Assert.AreEqual(1, T.Maximum);
 var T2 = new Triangular(1, 3, 6);
- Assert.AreEqual(T2.Minimum, 1);
- Assert.AreEqual(T2.Maximum, 6);
+ Assert.AreEqual(1, T2.Minimum);
+ Assert.AreEqual(6, T2.Maximum);
 }
 ///
@@ -291,13 +291,13 @@ public void Test_MinMax()
 public void Test_PDF()
 {
 var T = new Triangular();
- Assert.AreEqual(T.PDF(-1), 0);
- Assert.AreEqual(T.PDF(0.4), 1.6);
- Assert.AreEqual(T.PDF(0.6), 1.6);
- Assert.AreEqual(T.PDF(0.5), 2);
+ Assert.AreEqual(0, T.PDF(-1));
+ Assert.AreEqual(1.6, T.PDF(0.4));
+ Assert.AreEqual(1.6, T.PDF(0.6));
+ Assert.AreEqual(2, T.PDF(0.5));
 var T2 = new Triangular(1, 3, 6);
- Assert.AreEqual(T2.PDF(2), 0.2, 1e-04);
+ Assert.AreEqual(0.2, T2.PDF(2), 1e-04);
 }
 ///
@@ -307,13 +307,13 @@ public void Test_PDF()
 public void Test_CDF()
 {
 var T = new Triangular();
- Assert.AreEqual(T.CDF(-1), 0);
- Assert.AreEqual(T.CDF(2), 1);
- Assert.AreEqual(T.CDF(0.4), 0.32,1e-04);
- Assert.AreEqual(T.CDF(0.6), 0.68,1e-04);
+ Assert.AreEqual(0, T.CDF(-1));
+ Assert.AreEqual(1, T.CDF(2));
+ Assert.AreEqual(0.32, T.CDF(0.4), 1e-04);
+ Assert.AreEqual(0.68, T.CDF(0.6), 1e-04);
 var T2 = new Triangular(1,3, 6);
- Assert.AreEqual(T2.CDF(2), 0.1, 1e-04);
+ Assert.AreEqual(0.1, T2.CDF(2), 1e-04);
 }
 ///
@@ -323,10 +323,10 @@ public void Test_CDF()
 public void Test_InverseCDF()
 {
 var T = new Triangular();
- Assert.AreEqual(T.InverseCDF(0), 0);
- Assert.AreEqual(T.InverseCDF(1), 1);
- Assert.AreEqual(T.InverseCDF(0.2), 0.31622, 1e-04);
- Assert.AreEqual(T.InverseCDF(0.5), 0.5);
+ Assert.AreEqual(0, T.InverseCDF(0));
+ Assert.AreEqual(1, T.InverseCDF(1));
+ Assert.AreEqual(0.31622, T.InverseCDF(0.2), 1e-04);
+ Assert.AreEqual(0.5, T.InverseCDF(0.5));
 }
 }
 }
diff --git a/Test_Numerics/Distributions/Univariate/Test_TruncatedDistribution.cs b/Test_Numerics/Distributions/Univariate/Test_TruncatedDistribution.cs
index 7bf07f0f..9af0500e 100644
--- a/Test_Numerics/Distributions/Univariate/Test_TruncatedDistribution.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_TruncatedDistribution.cs
@@ -59,27 +59,27 @@ public void Test_TruncatedNormalDist()
 var p = tn.CDF(1.5);
 var q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.9786791, 1E-5);
- Assert.AreEqual(p, 0.3460251, 1E-5);
- Assert.AreEqual(q, 1.5, 1E-5);
+ Assert.AreEqual(0.9786791, d, 1E-5);
+ Assert.AreEqual(0.3460251, p, 1E-5);
+ Assert.AreEqual(1.5, q, 1E-5);
 tn = new TruncatedDistribution(new Normal(10, 3), 8, 25);
 d = tn.PDF(12.75);
 p = tn.CDF(12.75);
 q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.1168717, 1E-5);
- Assert.AreEqual(p, 0.7596566, 1E-5);
- Assert.AreEqual(q, 12.75, 1E-5);
+ Assert.AreEqual(0.1168717, d, 1E-5);
+ Assert.AreEqual(0.7596566, p, 1E-5);
+ Assert.AreEqual(12.75, q, 1E-5);
 tn = new TruncatedDistribution(new Normal(0, 3), 0, 9);
 d = tn.PDF(4.5);
 p = tn.CDF(4.5);
 q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.08657881, 1E-5);
- Assert.AreEqual(p, 0.868731, 1E-5);
- Assert.AreEqual(q, 4.5, 1E-5);
+ Assert.AreEqual(0.08657881, d, 1E-5);
+ Assert.AreEqual(0.868731, p, 1E-5);
+ Assert.AreEqual(4.5, q, 1E-5);
 }
@@ -90,16 +90,16 @@ public void Test_TruncatedNormalDist()
 public void Test_Construction()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(((Normal)tn.BaseDistribution).Mu, 0.5);
- Assert.AreEqual(((Normal)tn.BaseDistribution).Sigma, 0.2);
- Assert.AreEqual(tn.Min, 0);
- Assert.AreEqual(tn.Max, 1);
+ Assert.AreEqual(0.5, ((Normal)tn.BaseDistribution).Mu);
+ Assert.AreEqual(0.2, ((Normal)tn.BaseDistribution).Sigma);
+ Assert.AreEqual(0, tn.Min);
+ Assert.AreEqual(1, tn.Max);
 var tn2 = new TruncatedDistribution(new Normal(1, 1), 1, 2);
- Assert.AreEqual(((Normal)tn2.BaseDistribution).Mu, 1);
- Assert.AreEqual(((Normal)tn2.BaseDistribution).Sigma, 1);
- Assert.AreEqual(tn2.Min, 1);
- Assert.AreEqual(tn2.Max, 2);
+ Assert.AreEqual(1, ((Normal)tn2.BaseDistribution).Mu);
+ Assert.AreEqual(1, ((Normal)tn2.BaseDistribution).Sigma);
+ Assert.AreEqual(1, tn2.Min);
+ Assert.AreEqual(2, tn2.Max);
 }
 ///
@@ -109,14 +109,14 @@ public void Test_Construction()
 public void Test_ParametersToString()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.ParametersToString[0, 0], "Mean (µ)");
- Assert.AreEqual(tn.ParametersToString[1, 0], "Std Dev (σ)");
- Assert.AreEqual(tn.ParametersToString[2, 0], "Min");
- Assert.AreEqual(tn.ParametersToString[3, 0], "Max");
- Assert.AreEqual(tn.ParametersToString[0, 1], "0.5");
- Assert.AreEqual(tn.ParametersToString[1, 1], "0.2");
- Assert.AreEqual(tn.ParametersToString[2, 1], "0");
- Assert.AreEqual(tn.ParametersToString[3, 1], "1");
+ Assert.AreEqual("Mean (µ)", tn.ParametersToString[0, 0]);
+ Assert.AreEqual("Std Dev (σ)", tn.ParametersToString[1, 0]);
+ Assert.AreEqual("Min", tn.ParametersToString[2, 0]);
+ Assert.AreEqual("Max", tn.ParametersToString[3, 0]);
+ Assert.AreEqual("0.5", tn.ParametersToString[0, 1]);
+ Assert.AreEqual("0.2", tn.ParametersToString[1, 1]);
+ Assert.AreEqual("0", tn.ParametersToString[2, 1]);
+ Assert.AreEqual("1", tn.ParametersToString[3, 1]);
 }
 ///
@@ -140,7 +140,7 @@ public void Test_Moments()
 public void Test_Mean()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Mean, 0.5, 1e-4);
+ Assert.AreEqual(0.5, tn.Mean, 1e-4);
 }
 ///
@@ -150,7 +150,7 @@ public void Test_Mean()
 public void Test_Median()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Median, 0.5, 1e-4);
+ Assert.AreEqual(0.5, tn.Median, 1e-4);
 }
 ///
@@ -160,7 +160,7 @@ public void Test_Median()
 public void Test_Mode()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Mode, 0.5, 1e-4);
+ Assert.AreEqual(0.5, tn.Mode, 1e-4);
 }
 ///
@@ -170,7 +170,7 @@ public void Test_Mode()
 public void Test_StandardDeviation()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.StandardDeviation, 0.19091, 1e-4);
+ Assert.AreEqual(0.19091, tn.StandardDeviation, 1e-4);
 }
 ///
@@ -180,7 +180,7 @@ public void Test_StandardDeviation()
 public void Test_Skewness()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Skewness, 0, 1E-4);
+ Assert.AreEqual(0, tn.Skewness, 1E-4);
 }
 ///
@@ -190,7 +190,7 @@ public void Test_Skewness()
 public void Test_Kurtosis()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Kurtosis, 2.62422, 1e-04);
+ Assert.AreEqual(2.62422, tn.Kurtosis, 1e-04);
 }
 ///
@@ -200,8 +200,8 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var tn = new TruncatedDistribution(new Normal(0.5, 0.2), 0, 1);
- Assert.AreEqual(tn.Minimum, 0);
- Assert.AreEqual(tn.Maximum, 1);
+ Assert.AreEqual(0, tn.Minimum);
+ Assert.AreEqual(1, tn.Maximum);
 }
 }
 }
diff --git a/Test_Numerics/Distributions/Univariate/Test_TruncatedNormal.cs b/Test_Numerics/Distributions/Univariate/Test_TruncatedNormal.cs
index d7fb8b80..92d1a714 100644
--- a/Test_Numerics/Distributions/Univariate/Test_TruncatedNormal.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_TruncatedNormal.cs
@@ -66,27 +66,27 @@ public void Test_TruncatedNormalDist()
 var p = tn.CDF(1.5);
 var q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.9786791, 1E-5);
- Assert.AreEqual(p, 0.3460251, 1E-5);
- Assert.AreEqual(q, 1.5, 1E-5);
+ Assert.AreEqual(0.9786791, d, 1E-5);
+ Assert.AreEqual(0.3460251, p, 1E-5);
+ Assert.AreEqual(1.5, q, 1E-5);
 tn = new TruncatedNormal(10, 3, 8, 25);
 d = tn.PDF(12.75);
 p = tn.CDF(12.75);
 q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.1168717, 1E-5);
- Assert.AreEqual(p, 0.7596566, 1E-5);
- Assert.AreEqual(q, 12.75, 1E-5);
+ Assert.AreEqual(0.1168717, d, 1E-5);
+ Assert.AreEqual(0.7596566, p, 1E-5);
+ Assert.AreEqual(12.75, q, 1E-5);
 tn = new TruncatedNormal(0, 3, 0, 9);
 d = tn.PDF(4.5);
 p = tn.CDF(4.5);
 q = tn.InverseCDF(p);
- Assert.AreEqual(d, 0.08657881, 1E-5);
- Assert.AreEqual(p, 0.868731, 1E-5);
- Assert.AreEqual(q, 4.5, 1E-5);
+ Assert.AreEqual(0.08657881, d, 1E-5);
+ Assert.AreEqual(0.868731, p, 1E-5);
+ Assert.AreEqual(4.5, q, 1E-5);
 }
@@ -97,16 +97,16 @@ public void Test_TruncatedNormalDist()
 public void Test_Construction()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Mu, 0.5);
- Assert.AreEqual(tn.Sigma, 0.2);
- Assert.AreEqual(tn.Min, 0);
- Assert.AreEqual(tn.Max, 1);
+ Assert.AreEqual(0.5, tn.Mu);
+ Assert.AreEqual(0.2, tn.Sigma);
+ Assert.AreEqual(0, tn.Min);
+ Assert.AreEqual(1, tn.Max);
 var tn2 = new TruncatedNormal(1, 1, 1, 2);
- Assert.AreEqual(tn2.Mu, 1);
- Assert.AreEqual(tn2.Sigma, 1);
- Assert.AreEqual(tn2.Min, 1);
- Assert.AreEqual(tn2.Max, 2);
+ Assert.AreEqual(1, tn2.Mu);
+ Assert.AreEqual(1, tn2.Sigma);
+ Assert.AreEqual(1, tn2.Min);
+ Assert.AreEqual(2, tn2.Max);
 }
 ///
@@ -135,14 +135,14 @@ public void Test_InvalidParameters()
 public void Test_ParametersToString()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.ParametersToString[0, 0], "Mean (µ)");
- Assert.AreEqual(tn.ParametersToString[1, 0], "Std Dev (σ)");
- Assert.AreEqual(tn.ParametersToString[2, 0], "Min");
- Assert.AreEqual(tn.ParametersToString[3, 0], "Max");
- Assert.AreEqual(tn.ParametersToString[0, 1], "0.5");
- Assert.AreEqual(tn.ParametersToString[1, 1], "0.2");
- Assert.AreEqual(tn.ParametersToString[2, 1], "0");
- Assert.AreEqual(tn.ParametersToString[3, 1], "1");
+ Assert.AreEqual("Mean (µ)", tn.ParametersToString[0, 0]);
+ Assert.AreEqual("Std Dev (σ)", tn.ParametersToString[1, 0]);
+ Assert.AreEqual("Min", tn.ParametersToString[2, 0]);
+ Assert.AreEqual("Max", tn.ParametersToString[3, 0]);
+ Assert.AreEqual("0.5", tn.ParametersToString[0, 1]);
+ Assert.AreEqual("0.2", tn.ParametersToString[1, 1]);
+ Assert.AreEqual("0", tn.ParametersToString[2, 1]);
+ Assert.AreEqual("1", tn.ParametersToString[3, 1]);
 }
 ///
@@ -166,7 +166,7 @@ public void Test_Moments()
 public void Test_Mean()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Mean, 0.5);
+ Assert.AreEqual(0.5, tn.Mean);
 }
 ///
@@ -176,7 +176,7 @@ public void Test_Mean()
 public void Test_Median()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Median, 0.5);
+ Assert.AreEqual(0.5, tn.Median);
 }
 ///
@@ -186,7 +186,7 @@ public void Test_Median()
 public void Test_Mode()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Mode, 0.5);
+ Assert.AreEqual(0.5, tn.Mode);
 }
 ///
@@ -196,7 +196,7 @@ public void Test_Mode()
 public void Test_StandardDeviation()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.StandardDeviation, 0.19091,1e-05);
+ Assert.AreEqual(0.19091, tn.StandardDeviation, 1e-05);
 }
 ///
@@ -206,7 +206,7 @@ public void Test_StandardDeviation()
 public void Test_Skewness()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Skewness, 0);
+ Assert.AreEqual(0, tn.Skewness);
 }
 ///
@@ -216,7 +216,7 @@ public void Test_Skewness()
 public void Test_Kurtosis()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Kurtosis, 2.62422, 1e-04);
+ Assert.AreEqual(2.62422, tn.Kurtosis, 1e-04);
 }
 ///
@@ -226,8 +226,8 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var tn = new TruncatedNormal();
- Assert.AreEqual(tn.Minimum, 0);
- Assert.AreEqual(tn.Maximum, 1);
+ Assert.AreEqual(0, tn.Minimum);
+ Assert.AreEqual(1, tn.Maximum);
 }
 }
 }
diff --git a/Test_Numerics/Distributions/Univariate/Test_Uniform.cs b/Test_Numerics/Distributions/Univariate/Test_Uniform.cs
index 7b2cd14d..56f17684 100644
--- a/Test_Numerics/Distributions/Univariate/Test_Uniform.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_Uniform.cs
@@ -105,12 +105,12 @@ public void Test_Uniform_R()
 public void Test_Construction()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Min, 0);
- Assert.AreEqual(U.Max, 1);
+ Assert.AreEqual(0, U.Min);
+ Assert.AreEqual(1, U.Max);
 var U2 = new Uniform(2,10);
- Assert.AreEqual(U2.Min, 2);
- Assert.AreEqual(U2.Max, 10);
+ Assert.AreEqual(2, U2.Min);
+ Assert.AreEqual(10, U2.Max);
 }
 ///
@@ -142,10 +142,10 @@ public void Test_InvalidParameters()
 public void Test_ParametersToString()
 {
 var U = new Uniform();
- Assert.AreEqual(U.ParametersToString[0, 0], "Min");
- Assert.AreEqual(U.ParametersToString[1, 0], "Max");
- Assert.AreEqual(U.ParametersToString[0, 1], "0");
- Assert.AreEqual(U.ParametersToString[1, 1], "1");
+ Assert.AreEqual("Min", U.ParametersToString[0, 0]);
+ Assert.AreEqual("Max", U.ParametersToString[1, 0]);
+ Assert.AreEqual("0", U.ParametersToString[0, 1]);
+ Assert.AreEqual("1", U.ParametersToString[1, 1]);
 }
 ///
@@ -169,10 +169,10 @@ public void Test_Moments()
 public void Test_Mean()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Mean, 0.5);
+ Assert.AreEqual(0.5, U.Mean);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Mean, 6);
+ Assert.AreEqual(6, U2.Mean);
 }
 ///
@@ -182,10 +182,10 @@ public void Test_Mean()
 public void Test_Median()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Median, 0.5);
+ Assert.AreEqual(0.5, U.Median);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Median, 6);
+ Assert.AreEqual(6, U2.Median);
 }
 ///
@@ -195,10 +195,10 @@ public void Test_Median()
 public void Test_Mode()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Mode,double.NaN);
+ Assert.AreEqual(double.NaN, U.Mode);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Mode,double.NaN);
+ Assert.AreEqual(double.NaN, U2.Mode);
 }
 ///
@@ -208,10 +208,10 @@ public void Test_Mode()
 public void Test_StandardDeviation()
 {
 var U = new Uniform();
- Assert.AreEqual(U.StandardDeviation, 0.288675, 1e-05);
+ Assert.AreEqual(0.288675, U.StandardDeviation, 1e-05);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.StandardDeviation, 2.3094, 1e-04);
+ Assert.AreEqual(2.3094, U2.StandardDeviation, 1e-04);
 }
 ///
@@ -221,10 +221,10 @@ public void Test_StandardDeviation()
 public void Test_Skewness()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Skewness, 0);
+ Assert.AreEqual(0, U.Skewness);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Skewness, 0);
+ Assert.AreEqual(0, U2.Skewness);
 }
 ///
@@ -234,10 +234,10 @@ public void Test_Skewness()
 public void Test_Kurtosis()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Kurtosis, 9d / 5d);
+ Assert.AreEqual(9d / 5d, U.Kurtosis);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Kurtosis, 9d / 5d);
+ Assert.AreEqual(9d / 5d, U2.Kurtosis);
 }
 ///
@@ -247,12 +247,12 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var U = new Uniform();
- Assert.AreEqual(U.Minimum, 0);
- Assert.AreEqual(U.Maximum, 1);
+ Assert.AreEqual(0, U.Minimum);
+ Assert.AreEqual(1, U.Maximum);
 var U2 = new Uniform(2, 10);
- Assert.AreEqual(U2.Minimum, 2);
- Assert.AreEqual(U2.Maximum, 10);
+ Assert.AreEqual(2, U2.Minimum);
+ Assert.AreEqual(10, U2.Maximum);
 }
 ///
@@ -262,9 +262,9 @@ public void Test_MinMax()
 public void Test_PDF()
 {
 var U = new Uniform();
- Assert.AreEqual(U.PDF(-1),0);
- Assert.AreEqual(U.PDF(2),0);
- Assert.AreEqual(U.PDF(1), 1);
+ Assert.AreEqual(0, U.PDF(-1));
+ Assert.AreEqual(0, U.PDF(2));
+ Assert.AreEqual(1, U.PDF(1));
 }
 ///
@@ -274,9 +274,9 @@ public void Test_PDF()
 public void Test_CDF()
 {
 var U = new Uniform();
- Assert.AreEqual(U.CDF(0),0);
- Assert.AreEqual(U.CDF(1),1);
- Assert.AreEqual(U.CDF(0.5), 0.5);
+ Assert.AreEqual(0, U.CDF(0));
+ Assert.AreEqual(1, U.CDF(1));
+ Assert.AreEqual(0.5, U.CDF(0.5));
 }
 ///
@@ -286,9 +286,9 @@ public void Test_CDF()
 public void Test_InverseCDF()
 {
 var U = new Uniform();
- Assert.AreEqual(U.InverseCDF(0), 0);
- Assert.AreEqual(U.InverseCDF(1), 1);
- Assert.AreEqual(U.InverseCDF(0.3), 0.3);
+ Assert.AreEqual(0, U.InverseCDF(0));
+ Assert.AreEqual(1, U.InverseCDF(1));
+ Assert.AreEqual(0.3, U.InverseCDF(0.3));
 }
 }
 }
diff --git a/Test_Numerics/Distributions/Univariate/Test_UniformDiscrete.cs b/Test_Numerics/Distributions/Univariate/Test_UniformDiscrete.cs
index bb12c566..60264d36 100644
--- a/Test_Numerics/Distributions/Univariate/Test_UniformDiscrete.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_UniformDiscrete.cs
@@ -90,12 +90,12 @@ public void Test_UniformDiscreteDist()
 public void Test_Construction()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Min, 0);
- Assert.AreEqual(U.Max, 1);
+ Assert.AreEqual(0, U.Min);
+ Assert.AreEqual(1, U.Max);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Min, 2);
- Assert.AreEqual(U2.Max, 10);
+ Assert.AreEqual(2, U2.Min);
+ Assert.AreEqual(10, U2.Max);
 }
 ///
@@ -127,10 +127,10 @@ public void Test_InvalidParameters()
 public void Test_ParametersToString()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.ParametersToString[0, 0], "Min");
- Assert.AreEqual(U.ParametersToString[1, 0], "Max");
- Assert.AreEqual(U.ParametersToString[0, 1], "0");
- Assert.AreEqual(U.ParametersToString[1, 1], "1");
+ Assert.AreEqual("Min", U.ParametersToString[0, 0]);
+ Assert.AreEqual("Max", U.ParametersToString[1, 0]);
+ Assert.AreEqual("0", U.ParametersToString[0, 1]);
+ Assert.AreEqual("1", U.ParametersToString[1, 1]);
 }
 ///
@@ -140,10 +140,10 @@ public void Test_ParametersToString()
 public void Test_Mean()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Mean, 0.5);
+ Assert.AreEqual(0.5, U.Mean);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Mean, 6);
+ Assert.AreEqual(6, U2.Mean);
 }
 ///
@@ -153,10 +153,10 @@ public void Test_Mean()
 public void Test_Median()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Median, 0.5);
+ Assert.AreEqual(0.5, U.Median);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Median, 6);
+ Assert.AreEqual(6, U2.Median);
 }
 ///
@@ -166,10 +166,10 @@ public void Test_Median()
 public void Test_Mode()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Mode, double.NaN);
+ Assert.AreEqual(double.NaN, U.Mode);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Mode, double.NaN);
+ Assert.AreEqual(double.NaN, U2.Mode);
 }
 ///
@@ -179,10 +179,10 @@ public void Test_Mode()
 public void Test_StandardDeviation()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.StandardDeviation, 0.288675, 1e-05);
+ Assert.AreEqual(0.288675, U.StandardDeviation, 1e-05);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.StandardDeviation, 2.3094, 1e-04);
+ Assert.AreEqual(2.3094, U2.StandardDeviation, 1e-04);
 }
 ///
@@ -192,10 +192,10 @@ public void Test_StandardDeviation()
 public void Test_Skewness()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Skewness, 0);
+ Assert.AreEqual(0, U.Skewness);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Skewness, 0);
+ Assert.AreEqual(0, U2.Skewness);
 }
 ///
@@ -205,10 +205,10 @@ public void Test_Skewness()
 public void Test_Kurtosis()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Kurtosis, 1);
+ Assert.AreEqual(1, U.Kurtosis);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Kurtosis, 1.77);
+ Assert.AreEqual(1.77, U2.Kurtosis);
 }
 ///
@@ -218,12 +218,12 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.Minimum, 0);
- Assert.AreEqual(U.Maximum, 1);
+ Assert.AreEqual(0, U.Minimum);
+ Assert.AreEqual(1, U.Maximum);
 var U2 = new UniformDiscrete(2, 10);
- Assert.AreEqual(U2.Minimum, 2);
- Assert.AreEqual(U2.Maximum, 10);
+ Assert.AreEqual(2, U2.Minimum);
+ Assert.AreEqual(10, U2.Maximum);
 }
 ///
@@ -233,9 +233,9 @@ public void Test_MinMax()
 public void Test_PDF()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.PDF(-1), 0);
- Assert.AreEqual(U.PDF(2), 0);
- Assert.AreEqual(U.PDF(1), 0.5);
+ Assert.AreEqual(0, U.PDF(-1));
+ Assert.AreEqual(0, U.PDF(2));
+ Assert.AreEqual(0.5, U.PDF(1));
 }
 ///
@@ -245,9 +245,9 @@ public void Test_PDF()
 public void Test_CDF()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.CDF(0), 0.5);
- Assert.AreEqual(U.CDF(1), 1);
- Assert.AreEqual(U.CDF(0.5), 0.75);
+ Assert.AreEqual(0.5, U.CDF(0));
+ Assert.AreEqual(1, U.CDF(1));
+ Assert.AreEqual(0.75, U.CDF(0.5));
 }
 ///
@@ -257,9 +257,9 @@ public void Test_CDF()
 public void Test_InverseCDF()
 {
 var U = new UniformDiscrete();
- Assert.AreEqual(U.InverseCDF(0), 0);
- Assert.AreEqual(U.InverseCDF(1), 1);
- Assert.AreEqual(U.InverseCDF(0.3), 0);
+ Assert.AreEqual(0, U.InverseCDF(0));
+ Assert.AreEqual(1, U.InverseCDF(1));
+ Assert.AreEqual(0, U.InverseCDF(0.3));
 }
 }
diff --git a/Test_Numerics/Distributions/Univariate/Test_Weibull.cs b/Test_Numerics/Distributions/Univariate/Test_Weibull.cs
index 1b9ea373..79284819 100644
--- a/Test_Numerics/Distributions/Univariate/Test_Weibull.cs
+++ b/Test_Numerics/Distributions/Univariate/Test_Weibull.cs
@@ -71,8 +71,8 @@ public void Test_Weibull_MLE_Fit()
 double kappa = W.Kappa;
 double true_L = 9.589d;
 double true_k = 1.907d;
- Assert.AreEqual((lamda - true_L) / true_L < 0.01d, true);
- Assert.AreEqual((kappa - true_k) / true_k < 0.01d, true);
+ Assert.IsLessThan(0.01d, (lamda - true_L) / true_L);
+ Assert.IsLessThan(0.01d, (kappa - true_k) / true_k);
 }
 ///
@@ -92,10 +92,10 @@ public void Test_Weibull_Quantile()
 var W = new Weibull(9.589d, 1.907d);
 double q100 = W.InverseCDF(0.99d);
 double true_q100 = 21.358d;
- Assert.AreEqual((q100 - true_q100) / true_q100 < 0.01d, true);
+ Assert.IsLessThan(0.01d, (q100 - true_q100) / true_q100);
 double p = W.CDF(q100);
 double true_p = 0.99d;
- Assert.AreEqual((p - true_p) / true_p < 0.01d, true);
+ Assert.IsLessThan(0.01d, (p - true_p) / true_p);
 }
 ///
@@ -117,7 +117,7 @@ public void Test_Weibull_StandardError()
 var GUM = new Gumbel(8049.6d, 4478.6d);
 double qVar99 = Math.Sqrt(GUM.QuantileVariance(0.99d, 53, ParameterEstimationMethod.MaximumLikelihood));
 double true_qVar99 = 2486.5d;
- Assert.AreEqual((qVar99 - true_qVar99) / true_qVar99 < 0.01d, true);
+ Assert.IsLessThan(0.01d, (qVar99 - true_qVar99) / true_qVar99);
 }
 ///
@@ -137,8 +137,8 @@ public void Test_Weibull_GOF()
 modeled[i] = W.CDF(sample[i]);
 double true_AIC = 294.5878d;
 double true_BIC = 298.1566d;
- Assert.AreEqual((AIC - true_AIC) / true_AIC < 0.01d, true);
- Assert.AreEqual((BIC - true_BIC) / true_BIC < 0.01d, true);
+ Assert.IsLessThan(0.01d, (AIC - true_AIC) / true_AIC);
+ Assert.IsLessThan(0.01d, (BIC - true_BIC) / true_BIC);
 }
 ///
@@ -148,12 +148,12 @@ public void Test_Weibull_GOF()
 public void Test_Construction()
 {
 var W = new Weibull();
- Assert.AreEqual(W.Lambda, 10);
- Assert.AreEqual(W.Kappa, 2);
+ Assert.AreEqual(10, W.Lambda);
+ Assert.AreEqual(2, W.Kappa);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.Lambda, 1);
- Assert.AreEqual(W2.Kappa, 1);
+ Assert.AreEqual(1, W2.Lambda);
+ Assert.AreEqual(1, W2.Kappa);
 }
 ///
@@ -182,10 +182,10 @@ public void Test_InvalidParameters()
 public void Test_ParametersToString()
 {
 var W = new Weibull();
- Assert.AreEqual(W.ParametersToString[0, 0], "Scale (λ)");
- Assert.AreEqual(W.ParametersToString[1, 0], "Shape (κ)");
- Assert.AreEqual(W.ParametersToString[0, 1], "10");
- Assert.AreEqual(W.ParametersToString[1, 1], "2");
+ Assert.AreEqual("Scale (λ)", W.ParametersToString[0, 0]);
+ Assert.AreEqual("Shape (κ)", W.ParametersToString[1, 0]);
+ Assert.AreEqual("10", W.ParametersToString[0, 1]);
+ Assert.AreEqual("2", W.ParametersToString[1, 1]);
 }
 ///
@@ -209,10 +209,10 @@ public void Test_Moments()
 public void Test_Mean()
 {
 var W = new Weibull(0.1, 1);
- Assert.AreEqual(W.Mean, 0.1);
+ Assert.AreEqual(0.1, W.Mean);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.Mean, 1);
+ Assert.AreEqual(1, W2.Mean);
 }
 ///
@@ -222,10 +222,10 @@ public void Test_Mean()
 public void Test_Median()
 {
 var W = new Weibull(0.1, 1);
- Assert.AreEqual(W.Median, 0.06931, 1e-04);
+ Assert.AreEqual(0.06931, W.Median, 1e-04);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.Median, 0.69314, 1e-04);
+ Assert.AreEqual(0.69314, W2.Median, 1e-04);
 }
 ///
@@ -235,10 +235,10 @@ public void Test_Median()
 public void Test_Mode()
 {
 var W = new Weibull(0.1, 1);
- Assert.AreEqual(W.Mode, 0);
+ Assert.AreEqual(0, W.Mode);
 var W2 = new Weibull(10, 10);
- Assert.AreEqual(W2.Mode, 9.89519, 1e-05);
+ Assert.AreEqual(9.89519, W2.Mode, 1e-05);
 }
 ///
@@ -248,10 +248,10 @@ public void Test_Mode()
 public void Test_StandardDeviation()
 {
 var W = new Weibull(0.1, 1);
- Assert.AreEqual(W.StandardDeviation, 0.1);
+ Assert.AreEqual(0.1, W.StandardDeviation);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.StandardDeviation, 1);
+ Assert.AreEqual(1, W2.StandardDeviation);
 }
 ///
@@ -261,10 +261,10 @@ public void Test_StandardDeviation()
 public void Test_Skewness()
 {
 var W = new Weibull(0.1, 1);
- Assert.AreEqual(W.Skewness, 2,1e-04);
+ Assert.AreEqual(2, W.Skewness, 1e-04);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.Skewness, 2);
+ Assert.AreEqual(2, W2.Skewness);
 }
 ///
@@ -274,10 +274,10 @@ public void Test_Skewness()
 public void Test_Kurtosis()
 {
 var W = new Weibull();
- Assert.AreEqual(W.Kurtosis, 3.24508,1e-04);
+ Assert.AreEqual(3.24508, W.Kurtosis, 1e-04);
 var W2 = new Weibull(1, 1);
- Assert.AreEqual(W2.Kurtosis, 9);
+ Assert.AreEqual(9, W2.Kurtosis);
 }
 ///
@@ -287,8 +287,8 @@ public void Test_Kurtosis()
 public void Test_MinMax()
 {
 var W = new Weibull();
- Assert.AreEqual(W.Minimum, 0);
- Assert.AreEqual(W.Maximum,double.PositiveInfinity);
+ Assert.AreEqual(0, W.Minimum);
+ Assert.AreEqual(double.PositiveInfinity, W.Maximum);
 }
 ///
@@ -298,9 +298,9 @@ public void Test_MinMax()
 public void Test_PDF()
 {
 var W = new Weibull(1, 1);
- Assert.AreEqual(W.PDF(0), 1);
- Assert.AreEqual(W.PDF(1), 0.36787, 1e-05);
- Assert.AreEqual(W.PDF(10), 0.00004539, 1e-08);
+ Assert.AreEqual(1, W.PDF(0));
+ Assert.AreEqual(0.36787, W.PDF(1), 1e-05);
+ Assert.AreEqual(0.00004539, W.PDF(10), 1e-08);
 }
 ///
@@ -310,9 +310,9 @@ public void Test_PDF()
 public void Test_CDF()
 {
 var W = new Weibull(1, 1);
- Assert.AreEqual(W.CDF(0), 0);
- Assert.AreEqual(W.CDF(1), 0.63212, 1e-05);
- Assert.AreEqual(W.CDF(10), 0.99995, 1e-05);
+ Assert.AreEqual(0, W.CDF(0));
+ Assert.AreEqual(0.63212, W.CDF(1), 1e-05);
+ Assert.AreEqual(0.99995, W.CDF(10), 1e-05);
 }
 ///
@@ -322,9 +322,9 @@ public void Test_CDF()
 public void Test_InverseCDF()
 {
 var W = new Weibull();
- Assert.AreEqual(W.InverseCDF(0),0);
- Assert.AreEqual(W.InverseCDF(1),double.PositiveInfinity);
- Assert.AreEqual(W.InverseCDF(0.4), 7.1472, 1e-04);
+ Assert.AreEqual(0, W.InverseCDF(0));
+ Assert.AreEqual(double.PositiveInfinity, W.InverseCDF(1));
+ Assert.AreEqual(7.1472, W.InverseCDF(0.4), 1e-04);
 }
 }
 }
diff --git a/Test_Numerics/Functions/Test_Functions.cs b/Test_Numerics/Functions/Test_Functions.cs
index 3028e140..06cf0a27 100644
--- a/Test_Numerics/Functions/Test_Functions.cs
+++ b/Test_Numerics/Functions/Test_Functions.cs
@@ -232,12 +232,12 @@ public void Test_Tabular_Function()
 // Given X
 double X = 50.0;
 double Y = func.Function(X);
- Assert.AreEqual(Y, 100.0);
+ Assert.AreEqual(100.0, Y);
 // Given Y
 double Y2 = 100d;
 double X2 = func.InverseFunction(Y2);
- Assert.AreEqual(X, 50.0);
+ Assert.AreEqual(50.0, X2);
 // Given X - Interpolation
 double X3 = 75.0d;
diff --git a/Test_Numerics/Machine Learning/Supervised/Test_DecisionTree.cs b/Test_Numerics/Machine Learning/Supervised/Test_DecisionTree.cs
index 46a67fe8..ec39e56c 100644
--- a/Test_Numerics/Machine Learning/Supervised/Test_DecisionTree.cs
+++ b/Test_Numerics/Machine Learning/Supervised/Test_DecisionTree.cs
@@ -90,7 +90,7 @@ public void Test_DecisionTree_Iris()
 var accuracy = GoodnessOfFit.Accuracy(Y_test.Array, prediction);
 // Accuracy should be greater than or equal to 90%
- Assert.IsGreaterThanOrEqualTo(90,accuracy );
+ Assert.IsGreaterThanOrEqualTo(90, accuracy);
 }
diff --git a/Test_Numerics/Machine Learning/Supervised/Test_RandomForest.cs b/Test_Numerics/Machine Learning/Supervised/Test_RandomForest.cs
index d79c48e7..e779599d 100644
--- a/Test_Numerics/Machine Learning/Supervised/Test_RandomForest.cs
+++ b/Test_Numerics/Machine Learning/Supervised/Test_RandomForest.cs
@@ -133,7 +133,7 @@ public void Test_RandomForest_Regression()
 var lmR2 = GoodnessOfFit.RSquared(Y_test.Array, lmPredict);
 // Random Forest is better
- Assert.IsGreaterThan(lmR2, rfR2 );
+ Assert.IsGreaterThan(lmR2, rfR2);
 }
 }
diff --git a/Test_Numerics/Mathematics/Integration/Test_Vegas.cs b/Test_Numerics/Mathematics/Integration/Test_Vegas.cs
index bfd47fae..1c25940e 100644
--- a/Test_Numerics/Mathematics/Integration/Test_Vegas.cs
+++ b/Test_Numerics/Mathematics/Integration/Test_Vegas.cs
@@ -209,12 +209,12 @@ public void Test_PowerTransform_RareUpperTailEvent()
 // Analytical approximation: P(Sum > mean + 3σ) = P(Z > 3) ≈ 0.00135
 // We expect something in the ballpark of 1e-3 to 2e-3
- Assert.IsGreaterThan(5E-4, failureProbability );
- Assert.IsLessThan(5E-3, failureProbability );
+ Assert.IsGreaterThan(5E-4, failureProbability, $"Probability too small: {failureProbability:E6}");
+ Assert.IsLessThan(5E-3, failureProbability, $"Probability too large: {failureProbability:E6}");
 // Standard error should be reasonable (less than 50% of estimate)
 double relativeError = vegas.StandardError / Math.Abs(failureProbability);
- Assert.IsLessThan(0.5, relativeError);
+ Assert.IsLessThan(0.5, relativeError, $"Relative error too large: {relativeError:P1}");
 }
 ///
@@ -255,12 +255,12 @@ public void Test_PowerTransform_VeryRareEvent()
 var failureProbability = vegas.Result;
 // Should be in ballpark of 1e-6 to 1e-5
- Assert.IsGreaterThan(1E-7, failureProbability);
- Assert.IsLessThan(1E-4, failureProbability);
+ Assert.IsGreaterThan(1E-7, failureProbability, $"Probability too small: {failureProbability:E2}");
+ Assert.IsLessThan(1E-4, failureProbability, $"Probability too large: {failureProbability:E2}");
 // For very rare events, relative error can be higher but should be finite
 double relativeError = vegas.StandardError / Math.Abs(failureProbability);
- Assert.IsLessThan(1.0, relativeError);
+ Assert.IsLessThan(1.0, relativeError, $"Relative error too large: {relativeError:P1}");
 Assert.IsFalse(double.IsNaN(failureProbability), "Result should not be NaN");
 Assert.IsFalse(double.IsInfinity(failureProbability), "Result should not be infinite");
 }
@@ -311,13 +311,13 @@ public void Test_PowerTransform_ProbabilityRange()
 vegas.Integrate();
 // Verify samples were generated
- Assert.IsGreaterThan( 0, sampleCount );
+ Assert.IsGreaterThan(0, sampleCount, "No samples generated");
 // With γ=4, should see strong tail focus (many samples near 1.0)
- Assert.IsGreaterThan(0.99, maxObserved );
+ Assert.IsGreaterThan(0.99, maxObserved, $"Max probability too low: {maxObserved}");
 // Should still have some diversity (not all at 1.0)
- Assert.IsLessThan(0.5, minObserved );
+ Assert.IsLessThan(0.5, minObserved, $"Min probability too high: {minObserved}");
 }
diff --git a/Test_Numerics/Mathematics/Linear Algebra/Test_EigenValueDecomposition.cs b/Test_Numerics/Mathematics/Linear Algebra/Test_EigenValueDecomposition.cs
index d0dd8fb0..8bfc4c58 100644
--- a/Test_Numerics/Mathematics/Linear Algebra/Test_EigenValueDecomposition.cs
+++ b/Test_Numerics/Mathematics/Linear Algebra/Test_EigenValueDecomposition.cs
@@ -185,7 +185,7 @@ public void SymEig_3x3_RepeatedEigenvalues_AllTwos()
 // Max eigen residual
 var maxRes = MaxEigenResidual(A, V, w);
- Assert.IsLessThan(1e-12, maxRes);
+ Assert.IsLessThan(1e-12, maxRes, $"Max eigen residual too large: {maxRes}");
 }
 ///
@@ -232,7 +232,7 @@ public void SymEig_8x8_TridiagonalToeplitz_KnownSpectrum()
 // Max eigen residual
 var maxRes = MaxEigenResidual(A, V, w);
- Assert.IsLessThan(1e-8, maxRes);
+ Assert.IsLessThan(1e-8, maxRes, $"Max eigen residual too large: {maxRes}");
 }
 ///
@@ -268,7 +268,7 @@ public void SymEig_5x5_NearlyDiagonal_SmallCoupling()
 // Max eigen residual
 var maxRes = MaxEigenResidual(A, V, w);
- Assert.IsLessThan(1e-8, maxRes);
+ Assert.IsLessThan(1e-8, maxRes, $"Max eigen residual too large: {maxRes}");
 }
 // ---------- Helpers ----------
diff --git a/Test_Numerics/Mathematics/Linear Algebra/Test_GaussJordanElimination.cs b/Test_Numerics/Mathematics/Linear Algebra/Test_GaussJordanElimination.cs
index 8359847e..1de9a5f5 100644
--- a/Test_Numerics/Mathematics/Linear Algebra/Test_GaussJordanElimination.cs
+++ b/Test_Numerics/Mathematics/Linear Algebra/Test_GaussJordanElimination.cs
@@ -59,7 +59,7 @@ public void Test_GaussJordanElim()
 for (int i = 0; i < A.NumberOfRows; i++)
 {
 for (int j = 0; j < A.NumberOfColumns - 1; j++)
- Assert.AreEqual(A[i, j] == true_IA[i, j], true);
+ Assert.AreEqual(true_IA[i, j], A[i, j]);
 }
 /// Recreated Gauss Jordan test in R to compare the inverted A matrices.
diff --git a/Test_Numerics/Mathematics/Special Functions/Test_Gamma.cs b/Test_Numerics/Mathematics/Special Functions/Test_Gamma.cs
index 953ceefb..8c67223d 100644
--- a/Test_Numerics/Mathematics/Special Functions/Test_Gamma.cs
+++ b/Test_Numerics/Mathematics/Special Functions/Test_Gamma.cs
@@ -68,7 +68,7 @@ public void Test_Function()
 for (int i = 0; i < testValid.Length; i++)
 {
 testResults[i] = Gamma.Function(testX[i]);
- Assert.IsLessThan(0.01, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
+ Assert.IsLessThan(0.01d, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
 }
 }
@@ -85,7 +85,7 @@ public void Test_Lanczos()
 for (int i = 0; i < testValid.Length; i++)
 {
 testResults[i] = Gamma.Lanczos(testX[i]);
- Assert.IsLessThan(0.01, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
+ Assert.IsLessThan(0.01d, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
 }
 }
@@ -157,7 +157,7 @@ public void Test_Trigamma()
 for (int i = 0; i < testValid.Length; i++)
 {
 testResults[i] = Gamma.Trigamma(testX[i]);
- Assert.IsLessThan(0.01, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
+ Assert.IsLessThan(0.01d, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
 }
 }
@@ -187,7 +187,7 @@ public void Test_LogGamma()
 for (int i = 0; i < testValid.Length; i++)
 {
 testResults[i] = Gamma.LogGamma(testX[i]);
- Assert.IsLessThan(0.01, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
+ Assert.IsLessThan(0.01d, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
 }
 }
@@ -227,7 +227,7 @@ public void Test_Incomplete()
 for (int i = 0; i < testValid.Length; i++)
 {
 testResults[i] = Gamma.Incomplete(testX[i], testA[i]);
- Assert.IsLessThan(0.01,Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
+ Assert.IsLessThan(0.01d, Math.Abs(testValid[i] - testResults[i]) / testValid[i]);
 }
 }
diff --git a/Test_Numerics/Mathematics/Special Functions/Test_SpecialFunctions.cs b/Test_Numerics/Mathematics/Special Functions/Test_SpecialFunctions.cs
index 30dd152e..be9786fe 100644
--- a/Test_Numerics/Mathematics/Special Functions/Test_SpecialFunctions.cs
+++ b/Test_Numerics/Mathematics/Special Functions/Test_SpecialFunctions.cs
@@ -209,7 +209,7 @@ public void Test_CombinationsNum()
 var ccLen = cc.Length;
 // Length of cc should be the possible number of combinations * the number of elements in each combination (5)
- Assert.AreEqual(possible * 5, ccLen);
+ Assert.HasCount((int)possible * 5, cc);
 // How many of subsets of combinations there should be
 // For example, there are 5 ways to have only one #1 in the array, with the other 4 elements being #0
diff --git a/Test_Numerics/Sampling/Test_Stratification.cs b/Test_Numerics/Sampling/Test_Stratification.cs
index 8a6f04d4..7ce29f83 100644
--- a/Test_Numerics/Sampling/Test_Stratification.cs
+++ b/Test_Numerics/Sampling/Test_Stratification.cs
@@ -117,7 +117,7 @@ public void Test_XToProbability()
 weights += probs[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -143,7 +143,7 @@ public void Test_XToExceedanceProbability()
 weights += probs[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
@@ -203,7 +203,7 @@ public void Test_Probabilities()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -225,7 +225,7 @@ public void Test_Probabilities_Multi()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -244,7 +244,7 @@ public void Test_Probabilities_Log10()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -264,7 +264,7 @@ public void Test_Probabilities_Normal()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -283,7 +283,7 @@ public void Test_ExceedanceProbabilities()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -305,7 +305,7 @@ public void Test_ExceedanceProbabilities_Multi()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -324,7 +324,7 @@ public void Test_ExceedanceProbabilities_Log10()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 ///
@@ -343,7 +343,7 @@ public void Test_ExceedanceProbabilities_Normal()
 weights += bins[i].Weight;
 }
 // Check weights sum to 1.0
- Assert.AreEqual(weights, 1.0d, 1E-8);
+ Assert.AreEqual(1.0d, weights, 1E-8);
 }
 }
 }
diff --git a/Test_Numerics/Serialization/JsonConverterDemo.cs b/Test_Numerics/Serialization/JsonConverterDemo.cs
index 50e936c4..c98258fd 100644
--- a/Test_Numerics/Serialization/JsonConverterDemo.cs
+++ b/Test_Numerics/Serialization/JsonConverterDemo.cs
@@ -39,14 +39,14 @@ namespace Test_Numerics.Serialization
 /// Demonstration of custom JSON converters for complex types.
 ///
 [TestClass]
- public static class JsonConverterDemo
+ public class JsonConverterDemo
 {
 ///
 /// Demonstrates how the custom converters handle 2D arrays and complex distribution objects.
 ///
 ///
 [TestMethod]
- public static void RunDemo()
+ public void RunDemo()
 {
 Console.WriteLine("=== JSON Converter Demo ===\n");
diff --git a/Test_Numerics/Serialization/Test_JsonSerialization.cs b/Test_Numerics/Serialization/Test_JsonSerialization.cs
index 51c25d9b..6ec44ec2 100644
--- a/Test_Numerics/Serialization/Test_JsonSerialization.cs
+++ b/Test_Numerics/Serialization/Test_JsonSerialization.cs
@@ -99,14 +99,16 @@ public void Test_UncertaintyAnalysisResults_ArraySerialization()
 // Assert
 Assert.IsNotNull(deserialized.ModeCurve);
- Assert.HasCount(original.ModeCurve.Length, deserialized.ModeCurve);
+ var modeCurveLen = original.ModeCurve.Length;
+ Assert.HasCount(modeCurveLen, deserialized.ModeCurve);
 for (int i = 0; i < original.ModeCurve.Length; i++)
 {
 Assert.AreEqual(original.ModeCurve[i], deserialized.ModeCurve[i], 1e-10);
 }
 Assert.IsNotNull(deserialized.MeanCurve);
- Assert.HasCount(original.MeanCurve.Length, deserialized.MeanCurve);
+ var meanCurveLen = original.MeanCurve.Length;
+ Assert.HasCount(meanCurveLen, deserialized.MeanCurve);
 for (int i = 0; i < original.MeanCurve.Length; i++)
 {
 Assert.AreEqual(original.MeanCurve[i], deserialized.MeanCurve[i], 1e-10);
@@ -165,7 +167,8 @@ public void Test_UncertaintyAnalysisResults_ParameterSetsSerialization()
 var desLen = deserialized.ParameterSets.Length;
 // Assert
 Assert.IsNotNull(deserialized.ParameterSets);
- Assert.AreEqual(origLen, desLen);
+ var parameterSetsLen = original.ParameterSets.Length;
+ Assert.HasCount(parameterSetsLen, deserialized.ParameterSets);
 for (int i = 0; i < original.ParameterSets.Length; i++)
 {
@@ -175,7 +178,8 @@ public void Test_UncertaintyAnalysisResults_ParameterSetsSerialization()
 if (original.ParameterSets[i].Values != null)
 {
 Assert.IsNotNull(deserialized.ParameterSets[i].Values);
- Assert.HasCount(original.ParameterSets[i].Values.Length, deserialized.ParameterSets[i].Values);
+ var parameterValuesLen = original.ParameterSets[i].Values.Length;
+ Assert.HasCount(parameterValuesLen, deserialized.ParameterSets[i].Values);
 for (int j = 0; j < original.ParameterSets[i].Values.Length; j++)
 {
@@ -248,11 +252,11 @@ public void Test_UncertaintyAnalysisResults_EmptyArrays()
 // Assert
 Assert.IsNotNull(deserialized);
 Assert.IsNotNull(deserialized.ParameterSets);
- Assert.IsEmpty( deserialized.ParameterSets);
+ Assert.HasCount(0, deserialized.ParameterSets);
 Assert.IsNotNull(deserialized.ModeCurve);
- Assert.IsEmpty(deserialized.ModeCurve);
+ Assert.HasCount(0, deserialized.ModeCurve);
 Assert.IsNotNull(deserialized.MeanCurve);
- Assert.IsEmpty(deserialized.MeanCurve);
+ Assert.HasCount(0, deserialized.MeanCurve);
 }
 ///
@@ -300,7 +304,8 @@ public void Test_MCMCResults_BasicSerialization()
 // Assert
 Assert.IsNotNull(deserialized);
 Assert.IsNotNull(deserialized.AcceptanceRates);
- Assert.AreEqual(origLen, desLen);
+ var acceptanceRatesLen = original.AcceptanceRates.Length;
+ Assert.HasCount(acceptanceRatesLen, deserialized.AcceptanceRates);
 for (int i = 0; i < original.AcceptanceRates.Length; i++)
 {
@@ -331,12 +336,14 @@ public void Test_MCMCResults_MarkovChainsSerialization()
 var desLen = deserialized.MarkovChains.Length;
 // Assert
 Assert.IsNotNull(deserialized.MarkovChains);
- Assert.AreEqual(origLen, desLen);
+ var markovChainsLen = original.MarkovChains.Length;
+ Assert.HasCount(markovChainsLen, deserialized.MarkovChains);
 for (int i = 0; i < original.MarkovChains.Length; i++)
 {
 Assert.IsNotNull(deserialized.MarkovChains[i]);
- Assert.HasCount(original.MarkovChains[i].Count, deserialized.MarkovChains[i]);
+ var chainCount = original.MarkovChains[i].Count;
+ Assert.HasCount(chainCount, deserialized.MarkovChains[i]);
 for (int j = 0; j < original.MarkovChains[i].Count; j++)
 {
@@ -374,7 +381,8 @@ public void Test_MCMCResults_OutputSerialization()
 // Assert
 Assert.IsNotNull(deserialized.Output);
- Assert.HasCount(original.Output.Count, deserialized.Output);
+ var outputCount = original.Output.Count;
+ Assert.HasCount(outputCount, deserialized.Output);
 for (int i = 0; i < original.Output.Count; i++)
 {
@@ -429,7 +437,8 @@ public void Test_MCMCResults_MeanLogLikelihoodSerialization()
 // Assert
 Assert.IsNotNull(deserialized.MeanLogLikelihood);
- Assert.HasCount(original.MeanLogLikelihood.Count, deserialized.MeanLogLikelihood);
+ var meanLogLikelihoodCount = original.MeanLogLikelihood.Count;
+ Assert.HasCount(meanLogLikelihoodCount, deserialized.MeanLogLikelihood);
 for (int i = 0; i < original.MeanLogLikelihood.Count; i++)
 {
@@ -496,7 +505,8 @@ public void Test_MCMCResults_LargeDataSet()
 var deserializedLen = deserialized.MarkovChains.Length;
 // Assert
 Assert.IsNotNull(deserialized);
- Assert.AreEqual(origLen, deserializedLen);
+ var origChainsLen = original.MarkovChains.Length;
+ Assert.HasCount(origChainsLen, deserialized.MarkovChains);
 // Verify first and last elements to ensure proper serialization
 var firstOriginal = original.MarkovChains[0][0];
@@ -539,15 +549,15 @@ public void Test_JsonSerializerOptions_Configuration()
 // Assert
 // Verify that WriteIndented is false (no formatting whitespace)
- Assert.DoesNotContain(jsonString,("\n"));
- Assert.DoesNotContain(jsonString, (" ")); // No indentation
+ Assert.DoesNotContain("\n", jsonString);
+ Assert.DoesNotContain(" ", jsonString); // No indentation
 // Verify that null values are not included (DefaultIgnoreCondition)
- Assert.DoesNotContain(jsonString, ("\"ParentDistribution\":null"));
+ Assert.DoesNotContain("\"ParentDistribution\":null", jsonString);
 // Verify that fields are included (IncludeFields = true)
- Assert.DoesNotContain(jsonString, ("\"AIC\":"));
- Assert.DoesNotContain(jsonString, ("\"BIC\":"));
+ Assert.Contains("\"AIC\":", jsonString);
+ Assert.Contains("\"BIC\":", jsonString);
 }
 #endregion
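Note (appended by the editor, not part of the diff): the change set applies two conventions throughout the test suite. First, MSTest's Assert.AreEqual takes the expected value first and the actual value second, so failure messages read correctly; an optional delta bounds floating-point comparisons. Second, boolean-wrapped comparisons such as Assert.AreEqual(x < 0.01d, true) are replaced with dedicated comparison asserts. A minimal sketch of both patterns, assuming MSTest 3.8 or later (where Assert.IsLessThan and Assert.HasCount are available, as used in the diff above); the class and method names here are hypothetical, for illustration only.

using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class AssertConventionExamples
{
    [TestMethod]
    public void ExpectedValueComesFirst()
    {
        double actual = Math.Sqrt(2.0);
        // Expected first, actual second; the delta bounds rounding error.
        Assert.AreEqual(1.41421, actual, 1e-05);
    }

    [TestMethod]
    public void ComparisonAssertsReplaceBooleanAreEqual()
    {
        double relativeError = 0.004;
        var curve = new double[] { 1.0, 2.0, 3.0 };
        // Instead of Assert.AreEqual(relativeError < 0.01d, true), assert the
        // comparison directly; the message reports the offending value on failure.
        Assert.IsLessThan(0.01d, relativeError, $"Relative error too large: {relativeError:P1}");
        // Instead of comparing .Length values with AreEqual, assert the count directly.
        Assert.HasCount(3, curve);
    }
}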