diff --git a/.github/workflows/dotnet-core.yml b/.github/workflows/dotnet-core.yml index 351e39a60..4ab8244b8 100644 --- a/.github/workflows/dotnet-core.yml +++ b/.github/workflows/dotnet-core.yml @@ -24,6 +24,7 @@ jobs: 5.0.x 6.0.x 7.0.x + 8.0.x - name: Install dependencies run: dotnet restore ${{env.MainSln}} diff --git a/source/Documentation/ML22-23-1 Scalar Encoder with Buckets.pdf b/source/Documentation/ML22-23-1 Scalar Encoder with Buckets.pdf new file mode 100644 index 000000000..60f20bd45 Binary files /dev/null and b/source/Documentation/ML22-23-1 Scalar Encoder with Buckets.pdf differ diff --git a/source/GridCell/GridCell.csproj b/source/GridCell/GridCell.csproj index 68ebe13dd..528adc24c 100644 --- a/source/GridCell/GridCell.csproj +++ b/source/GridCell/GridCell.csproj @@ -1,34 +1,31 @@ - - - net5.0 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + net8.0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/source/HtmActorHost/HtmActorHost.csproj b/source/HtmActorHost/HtmActorHost.csproj index 3c5fb5e45..821cc23c9 100644 --- a/source/HtmActorHost/HtmActorHost.csproj +++ b/source/HtmActorHost/HtmActorHost.csproj @@ -1,50 +1,39 @@  - Exe - net6.0 + net8.0 Linux - full true - full true - - - - - - - - - - - + + + + + + - - PreserveNewest - - + \ No newline at end of file diff --git a/source/ImageEncoder/HtmImageEncoder.csproj b/source/ImageEncoder/HtmImageEncoder.csproj index 75403ed68..91ec8b259 100644 --- a/source/ImageEncoder/HtmImageEncoder.csproj +++ b/source/ImageEncoder/HtmImageEncoder.csproj @@ -1,42 +1,29 @@  - - - net5.0 - enable - license.txt - true - - True - - HtmImageEncoder - - 1.0.1 - - Toan Thanh Truong, Damir Dobric - - HtmImageEncoder - - Package that extends NeocortexApi to work with images through ImageEncoder. It encodes the image to to the SDR. 
- - https://github.com/ddobric/neocortexapi/tree/ImageBinarizerEncoder/source/ImageEncoder - - https://github.com/ddobric/neocortexapi/tree/ImageBinarizerEncoder - - 1.0.1 - - 1.0.1 - - - - - True - - - - - - - - - - + + net8.0 + enable + license.txt + true + True + HtmImageEncoder + 1.0.1 + Toan Thanh Truong, Damir Dobric + HtmImageEncoder + Package that extends NeocortexApi to work with images through ImageEncoder. It encodes the image to to the SDR. + https://github.com/ddobric/neocortexapi/tree/ImageBinarizerEncoder/source/ImageEncoder + https://github.com/ddobric/neocortexapi/tree/ImageBinarizerEncoder + 1.0.1 + 1.0.1 + + + + True + + + + + + + + + \ No newline at end of file diff --git a/source/NeoCortexApi.Experiments/NeoCortexApi.Experiments.csproj b/source/NeoCortexApi.Experiments/NeoCortexApi.Experiments.csproj index 2bb3f5af0..36c836b16 100644 --- a/source/NeoCortexApi.Experiments/NeoCortexApi.Experiments.csproj +++ b/source/NeoCortexApi.Experiments/NeoCortexApi.Experiments.csproj @@ -1,20 +1,15 @@ - - net5.0 - + net8.0 false - - - @@ -23,7 +18,6 @@ - PreserveNewest @@ -44,5 +38,4 @@ PreserveNewest - - + \ No newline at end of file diff --git a/source/NeoCortexApi.sln b/source/NeoCortexApi.sln index fd63cb329..424f487d3 100644 --- a/source/NeoCortexApi.sln +++ b/source/NeoCortexApi.sln @@ -1,4 +1,3 @@ - Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.0.32014.148 @@ -53,7 +52,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "nuget", "nuget", "{9475B9AE EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GridCell", "GridCell\GridCell.csproj", "{E102D57D-BA8F-4E21-8365-8ABAFB5D2C94}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "NeoCortexApiAnomaly", "Samples\NeoCortexApiAnomaly\NeoCortexApiAnomaly.csproj", "{7F272910-3A59-4BBB-8888-9A7F695CA754}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NeoCortexApiAnomaly", 
"Samples\NeoCortexApiAnomaly\NeoCortexApiAnomaly.csproj", "{7F272910-3A59-4BBB-8888-9A7F695CA754}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NeocortexApiLLMSample", "NeocortexApiLLMSample\NeocortexApiLLMSample.csproj", "{8D1F778F-AF73-44CF-965E-C061C07B99A7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ApproveMultiSequenceLearning", "Samples\ApproveMultiSequenceLearning\ApproveMultiSequenceLearning.csproj", "{D56D5B70-0094-41F0-9FFD-BCAB50BA7737}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -117,6 +120,14 @@ Global {7F272910-3A59-4BBB-8888-9A7F695CA754}.Debug|Any CPU.Build.0 = Debug|Any CPU {7F272910-3A59-4BBB-8888-9A7F695CA754}.Release|Any CPU.ActiveCfg = Release|Any CPU {7F272910-3A59-4BBB-8888-9A7F695CA754}.Release|Any CPU.Build.0 = Release|Any CPU + {8D1F778F-AF73-44CF-965E-C061C07B99A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8D1F778F-AF73-44CF-965E-C061C07B99A7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8D1F778F-AF73-44CF-965E-C061C07B99A7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8D1F778F-AF73-44CF-965E-C061C07B99A7}.Release|Any CPU.Build.0 = Release|Any CPU + {D56D5B70-0094-41F0-9FFD-BCAB50BA7737}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D56D5B70-0094-41F0-9FFD-BCAB50BA7737}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D56D5B70-0094-41F0-9FFD-BCAB50BA7737}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D56D5B70-0094-41F0-9FFD-BCAB50BA7737}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -126,6 +137,8 @@ Global {15375C28-1424-4A29-B2AF-11ED92F6134C} = {99AE2313-7D07-4905-A0BB-52D304FCC2F7} {AE2CF42C-DC7D-4F0A-8B43-84A5D81E1D72} = {99AE2313-7D07-4905-A0BB-52D304FCC2F7} {7F272910-3A59-4BBB-8888-9A7F695CA754} = {99AE2313-7D07-4905-A0BB-52D304FCC2F7} + {8D1F778F-AF73-44CF-965E-C061C07B99A7} = {99AE2313-7D07-4905-A0BB-52D304FCC2F7} + {D56D5B70-0094-41F0-9FFD-BCAB50BA7737} = 
{99AE2313-7D07-4905-A0BB-52D304FCC2F7} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {71185BAD-1342-4836-BCDA-6EC97118F92D} diff --git a/source/NeoCortexApi/Encoders/EncoderBase.cs b/source/NeoCortexApi/Encoders/EncoderBase.cs index b63cad9f5..141d00622 100644 --- a/source/NeoCortexApi/Encoders/EncoderBase.cs +++ b/source/NeoCortexApi/Encoders/EncoderBase.cs @@ -5,6 +5,8 @@ using System; using System.Collections.Generic; using System.IO; +using System.Linq; +using System.Security.Cryptography; using System.Text; namespace NeoCortexApi.Encoders @@ -33,9 +35,9 @@ public abstract class EncoderBase : IHtmModule, ISerializable protected int nInternal; protected double rangeInternal; - + protected bool encLearningEnabled; - + protected List flattenedFieldTypeList; protected Dictionary, List> decoderFieldTypes; @@ -50,6 +52,7 @@ public abstract class EncoderBase : IHtmModule, ISerializable // Moved to MultiEncoder. //protected Dictionary> encoders; protected List scalarNames; + private object[] encoders; /// /// Default constructor. @@ -87,6 +90,11 @@ public void Initialize(Dictionary encoderSettings) Radius = -1.0; Periodic = false; ClipInput = false; + NumBits = 0; + PeriodicRadius = 0; + BucketWidth = 0; + NumBuckets = 0; + foreach (var item in encoderSettings) { @@ -132,7 +140,7 @@ public object this[string key] } } - + /// /// In real cortex mode, W must be >= 21. Empirical value. 
@@ -146,6 +154,21 @@ public object this[string key] public int N { get => (int)this["N"]; set => this["N"] = (int)value; } + public int Verbosity { get => (int)this["Verbosity"]; set => this["Verbosity"] = (int)value; } + + public int startIdx { get => (int)this["startIdx"]; set => this["startIdx"] = (int)value; } + public int runLength { get => (int)this["runLength"]; set => this["runLength"] = (int)value; } + public bool[] tmpOutput { get => (bool[])this["tmpOutput"]; set => this["tmpOutput"] = (bool[])value; } + + + public double run { get => (double)this["run"]; set => this["run"] = (double)value; } + /// + /// public double nz { get => (double)this["nz"]; set => this["nz"] = (double)value; } + /// + public double runs { get => (double)this["runs"]; set => this["runs"] = (double)value; } + + + public int NInternal { get => (int)this["NInternal"]; set => this["NInternal"] = (int)value; } /// @@ -153,6 +176,8 @@ public object this[string key] /// public int W { get => (int)this["W"]; set => this["W"] = (int)value; } + + public double MinVal { get => (double)this["MinVal"]; set => this["MinVal"] = (double)value; } public double MaxVal { get => (double)this["MaxVal"]; set => this["MaxVal"] = (double)value; } @@ -175,6 +200,12 @@ public object this[string key] /// public bool ClipInput { get => (bool)this["ClipInput"]; set => this["ClipInput"] = (bool)value; } + public int NumBits { get; private set; } + public double PeriodicRadius { get; private set; } + public double BucketWidth { get; private set; } + public int NumBuckets { get; private set; } + public double[] Centers { get; private set; } + public int Padding { get => (int)this["Padding"]; set => this["Padding"] = value; } public double Range { get => (double)this["Range"]; set => this["Range"] = value; } @@ -203,6 +234,7 @@ public object this[string key] /// Returns true if the underlying encoder works on deltas /// public abstract bool IsDelta { get; } + public (object name, object enc, object offset) 
encoder { get; private set; } #endregion /// @@ -260,6 +292,47 @@ public int[] RightVecProd(SparseObjectMatrix matrix, int[] encoded) } + + /// + /// This method maps a value from one range to another range. + ///It takes in the value, the minimum and maximum of the input range, and the minimum and maximum of the output range as parameters. + ///The method then returns the corresponding value in the output range based on the input value and input-output range relationship. + /// + /// + /// + /// + /// + /// + /// + public static double map(double val, double fromMin, double fromMax, double toMin, double toMax) + { + return (val - fromMin) * (toMax - toMin) / (fromMax - fromMin) + toMin; + } + + + /// + ///This method wraps an input value within a specified range, so that it always falls within the range. + /// If the input value is outside the range, it is wrapped around to the other side of the range until it falls within the range. + /// The range is defined by a minimum and maximum value. + /// + /// + /// + /// + /// + public static int wrap(int val, int minVal, int maxVal) + { + int range = maxVal - minVal + 1; + while (val < minVal) + { + val += range; + } + while (val > maxVal) + { + val -= range; + } + return val; + } + /// /// Returns the rendered similarity matrix for the whole rage of values between min and max. 
/// @@ -270,7 +343,7 @@ public string TraceSimilarities(bool traceValues = true) Dictionary sdrMap = new Dictionary(); List inpVals = new List(); StringBuilder sb = new StringBuilder(); - + for (double i = this.MinVal; i < this.MaxVal; i += 1.0) { var sdr = this.Encode(i); @@ -280,7 +353,7 @@ public string TraceSimilarities(bool traceValues = true) if (traceValues) { sb.AppendLine($"{i.ToString("000")} - {Helpers.StringifyVector(sdr, separator: null)}"); - } + } } sb.AppendLine(); @@ -319,8 +392,8 @@ public bool Equals(EncoderBase other) public void Serialize(object obj, string name, StreamWriter sw) { - var excludeMembers = new List - { + var excludeMembers = new List + { nameof(EncoderBase.Properties), nameof(EncoderBase.halfWidth), nameof(EncoderBase.rangeInternal), diff --git a/source/NeoCortexApi/Encoders/ScalarEncoder.cs b/source/NeoCortexApi/Encoders/ScalarEncoder.cs index 134bf272e..f1bb23fc3 100644 --- a/source/NeoCortexApi/Encoders/ScalarEncoder.cs +++ b/source/NeoCortexApi/Encoders/ScalarEncoder.cs @@ -2,11 +2,21 @@ // Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information. 
using NeoCortexApi.Entities; using NeoCortexApi.Utility; +using NeoCortexEntities.NeuroVisualizer; +using Newtonsoft.Json.Linq; using System; +using System.Buffers.Text; +using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.IO; +using System.Linq; +using System.Net.Sockets; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using System.Xml.Linq; namespace NeoCortexApi.Encoders { @@ -17,22 +27,18 @@ namespace NeoCortexApi.Encoders /// public class ScalarEncoder : EncoderBase { + private int v1; + private int v2; + private int v3; + private bool v4; + /// /// Gets a value indicating whether IsDelta /// public override bool IsDelta => throw new NotImplementedException(); - /// - /// Gets the Width - /// public override int Width => throw new NotImplementedException(); - /// - /// Initializes a new instance of the class. - /// - public ScalarEncoder() - { - } /// /// Initializes a new instance of the class. @@ -43,6 +49,14 @@ public ScalarEncoder(Dictionary encoderSettings) this.Initialize(encoderSettings); } + public ScalarEncoder(int v1, int v2, int v3, bool v4) + { + this.v1 = v1; + this.v2 = v2; + this.v3 = v3; + this.v4 = v4; + } + /// /// The AfterInitialize /// @@ -75,7 +89,7 @@ public override void AfterInitialize() // each case here. InitEncoder(W, MinVal, MaxVal, N, Radius, Resolution); - //nInternal represents the output _area excluding the possible padding on each side + //nInternal represents the output area excluding the possible padding on each side NInternal = N - 2 * Padding; if (Name == null) @@ -102,10 +116,21 @@ public override void AfterInitialize() } } - + /// + /// This method initializes the encoder with the given parameters, such as w, minVal, maxVal, n, radius, and resolution. + ///It checks if the encoder is already initialized and if minVal and maxVal are valid. 
+ ///If N is not set, it calculates N based on the given parameters and sets the Range and Resolution values accordingly. + /// + /// + /// + /// + /// + /// + /// + /// protected void InitEncoder(int w, double minVal, double maxVal, int n, double radius, double resolution) { - if (n != 0) + if (N != 0) { if (double.NaN != minVal && double.NaN != maxVal) { @@ -160,13 +185,187 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra } } + /// + /// This method decodes an array of outputs based on the provided parameters, and returns an array of decoded inputs. + ///The decoding process involves identifying the runs of 1s in the output array, and mapping those runs to ranges of input + ///values based on the specified minimum and maximum values. + ///If the periodic parameter is set to true, the decoded input array is checked for periodicity and adjusted if necessary. + /// + /// + /// + /// + /// + /// + /// + /// + public static int[] Decode(int[] output, int minVal, int maxVal, int n, double w, bool periodic) + { + // Identify the runs of 1s in the output array + List runs = new List(); + int start = -1; + int prev = 0; + int count = 0; + for (int i = 0; i < output.Length; i++) + { + if (output[i] == 0) + { + if (start != -1) + { + runs.Add(new int[] { start, prev, count }); + start = -1; + count = 0; + } + } + else + { + if (start == -1) + { + start = i; + } + prev = i; + count++; + } + } + if (start != -1) + { + runs.Add(new int[] { start, prev, count }); + } + // Adjust periodic input space if necessary + if (periodic && runs.Count > 1) + { + int[] first = runs[0]; + int[] last = runs[runs.Count - 1]; + if (first[0] == 0 && last[1] == output.Length - 1) + { + first[1] = last[1]; + first[2] += last[2]; + runs.RemoveAt(runs.Count - 1); + } + } + // Map the runs of 1s to ranges of input values based on the specified parameters + List input = new List(); + foreach (int[] run in runs) + { + int left = (int)Math.Floor(run[0] + 0.5 * (run[2] 
- w)); + int right = (int)Math.Floor(run[1] - 0.5 * (run[2] - w)); + if (left < 0 && periodic) + { + left += output.Length; + right += output.Length; + } + for (int i = left; i <= right; i++) + { + int val = (int)Math.Round(map(i, 0, output.Length - 1, minVal, maxVal)); + if (periodic) + { + val = wrap(val, minVal, maxVal); + } + if (val >= minVal && val <= maxVal) + { + input.Add(val); + } + } + } + // Sort the decoded input array and adjust periodic input space if necessary + input.Sort(); + if (periodic && input.Count > 0) + { + int max = input[input.Count - 1]; + if (max > maxVal) + { + List input2 = new List(); + foreach (int val in input) + { + if (val <= maxVal) + { + input2.Add(val); + } + } + input = input2; + input2 = new List(); + foreach (int val in input) + { + if (val >= minVal) + { + input2.Add(val); + } + } + input = input2; + } + } + return input.ToArray(); + } + + + /// - /// Gets the index of the first non-zero bit. + /// This method encodes the input into an array of active bits using a scalar encoder + /// It takes into account both periodic and non-periodic encoders + /// The active bits are set based on the bucket index calculated for the input value /// /// - /// Null in a case of an error. - /// + /// The array of active bits. + public bool[] EncodeIntoArray(double inputData, bool[] output) + { + double input = Convert.ToDouble(inputData, CultureInfo.InvariantCulture); + if (input == double.NaN) + { + return output; + } + + int? 
bucketVal = GetFirstOnBit(input); + if (bucketVal != null) + { + int bucketIdx = bucketVal.Value; + //Arrays.fill(output, 0); + int minbin = bucketIdx; + int maxbin = minbin + 2 * HalfWidth; + // Adjust bins for periodic encoders + if (Periodic) + { + if (maxbin >= N) + { + int bottombins = maxbin - N + 1; + for (int i = 0; i < bottombins; i++) + { + output[i] = true; + } + maxbin = N - 1; + } + if (minbin < 0) + { + int topbins = -minbin; + for (int i = 0; i < topbins; i++) + { + output[N - i - 1] = true; + } + minbin = 0; + } + } + // Set active bits for the calculated bin range + for (int i = minbin; i <= maxbin; i++) + { + output[i] = true; + } + } + + // Output 1-D array of same length resulted in parameter N + return output; + } + + + + + + /// + /// Given an input value, returns the index of the first non-zero bit in the + /// corresponding binary array. + /// + /// The input value to be encoded into a binary array. + /// The index of the first non-zero bit in the binary array for the given input value. + /// Returns null if the input value is NaN. + /// Thrown when the input value is out of range or invalid. protected int? 
GetFirstOnBit(double input) { if (input == double.NaN) @@ -175,10 +374,12 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra } else { + // Check if input value is within the specified range if (input < MinVal) { if (ClipInput && !Periodic) { + // Clip the input value to the minimum value if ClipInput flag is set Debug.WriteLine("Clipped input " + Name + "=" + input + " to minval " + MinVal); input = MinVal; @@ -189,7 +390,7 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra } } } - + // Check if input value is within the periodic range if (Periodic) { if (input >= MaxVal) @@ -197,12 +398,14 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra throw new ArgumentException($"Input ({input}) greater than periodic range ({MinVal} - {MaxVal}"); } } + // Check if input value is within the non-periodic range else { if (input > MaxVal) { if (ClipInput) { + // Clip the input value to the maximum value if ClipInput flag is set Debug.WriteLine($"Clipped input {Name} = {input} to maxval MaxVal"); input = MaxVal; @@ -215,6 +418,7 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra } int centerbin; + // Calculate the center bin index based on whether the encoder is periodic or not if (Periodic) { centerbin = (int)((input - MinVal) * NInternal / Range + Padding); @@ -223,7 +427,7 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra { centerbin = ((int)(((input - MinVal) + Resolution / 2) / Resolution)) + Padding; } - + // Return the index of the first non-zero bit in the binary array for the given input value return centerbin - HalfWidth; } @@ -234,22 +438,174 @@ protected void InitEncoder(int w, double minVal, double maxVal, int n, double ra /// The data to be encoded. Must be of type double. /// The bucket index. /// - public int? GetBucketIndex(object inputData) + + public int? 
GetBucketIndex(decimal inputData) { - double input = Convert.ToDouble(inputData, CultureInfo.InvariantCulture); - if (input == double.NaN) + if ((double)inputData < MinVal || (double)inputData > MaxVal) { return null; } - int? bucketVal = GetFirstOnBit(input); + decimal fraction = (decimal)(((double)inputData - MinVal) / (MaxVal - MinVal)); + + if (Periodic) + { + fraction = fraction - Math.Floor(fraction); + } + + int bucketIndex = (int)Math.Floor(fraction * N); + + if (bucketIndex == N) + { + bucketIndex = 0; + } + + // For periodic encoders, the center of the first bucket is considered equal to the center of the last bucket + if (Periodic && bucketIndex == 0 && Math.Abs((double)inputData - MaxVal) <= 0.0000000000000000000000000001) + { + bucketIndex = N - 1; + } + + // Check if the input value is within the radius of the bucket + if (Radius >= 0) + { + decimal bucketWidth = ((decimal)MaxVal - (decimal)MinVal) / (decimal)N; + decimal bucketCenter = (bucketWidth * bucketIndex) + (bucketWidth / 2) + (decimal)MinVal; + + if (Math.Abs((decimal)inputData - bucketCenter) > (decimal)Radius * bucketWidth) + { + return null; + } + } + + return bucketIndex; - return bucketVal; + } + + /// + /// This code calculates bucket information for a scalar value based on the provided encoder parameters. + /// It first clips the input value to the specified range, calculates the bucket index and center, and then + /// calculates the bucket bounds. It also handles periodic encoding by wrapping the bucket index and choosing + /// the closest edge as the bucket center. The function returns an integer array containing the bucket index, + /// the rounded bucket center, and the rounded bucket start and end points. 
+ /// + /// + /// + public int[] GetBucketInfo(double input) + { + // Clip input to range + if (input < MinVal) + { + input = MinVal; + } + else if (input > MaxVal) + { + input = MaxVal; + } + + // Calculate bucket index + double bucketWidth = (MaxVal - MinVal) / N; + int bucketIndex = (int)((input - MinVal) / bucketWidth); + + // Calculate bucket center + double bucketCenter = MinVal + (bucketIndex + 0.5) * bucketWidth; + + // Calculate bucket bounds + double bucketStart = MinVal + bucketIndex * bucketWidth; + double bucketEnd = MinVal + (bucketIndex + 1) * bucketWidth; + + // Handle periodic encoding + if (Periodic) + { + // Wrap bucket index + if (bucketIndex < 0) + { + bucketIndex += N; + } + else if (bucketIndex >= N) + { + bucketIndex -= N; + } + + // Calculate distance to nearest edge + double distToStart = input - bucketStart; + double distToEnd = bucketEnd - input; + + if (distToStart < 0) + { + distToStart += MaxVal - MinVal; + } + if (distToEnd < 0) + { + distToEnd += MaxVal - MinVal; + } + + // Choose the closest edge as bucket center + if (distToStart < distToEnd) + { + bucketCenter = bucketStart; + } + else + { + bucketCenter = bucketEnd; + } + } + + return new int[] { bucketIndex, (int)Math.Round(bucketCenter), (int)Math.Round(bucketStart), (int)Math.Round(bucketEnd) }; + } + + + + /// + /// Generates a string description of a list of ranges. + /// + /// A list of Tuple values representing the start and end values of each range. + /// A string representation of the ranges, where each range is separated by a comma and space. 
+ public string GenerateRangeDescription(List> ranges) + { + var desc = ""; + var numRanges = ranges.Count; + for (var i = 0; i < numRanges; i++) + { + if (ranges[i].Item1 != ranges[i].Item2) + { + desc += $"{ranges[i].Item1:F2}-{ranges[i].Item2:F2}"; + } + else + { + desc += $"{ranges[i].Item1:F2}"; + } + + if (i < numRanges - 1) + { + desc += ", "; + } + } + + return desc; + } + + private string DecodedToStr(Tuple, string>>, List> tuple) + { + throw new NotImplementedException(); + } + + private void PPrint(double[] output) + { + throw new NotImplementedException(); + } + + private Tuple, string>>, List> Decode(double[] output) + { + throw new NotImplementedException(); } /// - /// Encodes the given scalar value as SDR as defined by HTM. + /// This method encodes an input value using the Scalar Encoder algorithm and returns an integer array as the output. + ///The input value is first converted to a double using the CultureInfo.InvariantCulture format. + ///The method checks if the input value is NaN and returns null if it is, otherwise it proceeds with encoding the value + ///into an integer array using the Scalar Encoder algorithm. /// /// The inputData /// The @@ -297,6 +653,63 @@ public override int[] Encode(object inputData) } + /// + /// Calculates closeness scores between expected and actual values using the ScalarEncoder's parameters. + /// + /// Array of expected values. + /// Array of actual values. + /// Flag to determine whether fractional or absolute closeness score should be returned. + /// An array of closeness scores. + public double[] ClosenessScores(double[] expValues, double[] actValues, bool fractional = true) + { + // Get the first value from both arrays. + double expValue = expValues[0]; + double actValue = actValues[0]; + + // Calculate the absolute difference between the two values, considering periodicity if enabled. 
+ double err; + + if (Periodic) + { + expValue = expValue % MaxVal; + actValue = actValue % MaxVal; + err = Math.Min(Math.Abs(expValue - actValue), MaxVal - Math.Abs(expValue - actValue)); + } + else + { + err = Math.Abs(expValue - actValue); + } + + // Calculate the closeness score. + double closeness; + if (fractional) + { + // Calculate the maximum possible range of values, considering clipping and periodicity. + double range = (MaxVal - MinVal) + (ClipInput ? 0 : (2 * (MaxVal - MinVal) / (N - 1))); + + // Calculate the percentage of error relative to the maximum range. + double pctErr = err / range; + + // Cap the percentage at 100% to ensure that closeness score is always >= 0. + pctErr = Math.Min(1.0, pctErr); + + // Calculate the closeness score. + closeness = 1.0 - pctErr; + } + else + { + // Calculate the absolute closeness score. + closeness = err; + } + + // Return an array containing the calculated closeness score. + return new double[] { closeness }; + } + + + + + /// /// This method enables running in the network. /// @@ -318,10 +731,122 @@ public override List GetBucketValues() throw new NotImplementedException(); } - //public static object Deserialize(StreamReader sr, string name) - //{ - // var excludeMembers = new List { nameof(ScalarEncoder.Properties) }; - // return HtmSerializer2.DeserializeObject(sr, name, excludeMembers); - //} + + /// + /// Calculates the lower and upper bounds of a given input value in a range of values. + /// Throws an exception if the input value is outside the encoder's range or is not a valid number. + /// + /// The input value to be bucketized. + /// An array containing the bucket lower and upper bounds. + /// Thrown when the input value is not a valid number or is outside of the encoder's range. + /// Thrown when the bucket width is not valid. 
+ public double[] GetBucketValues(double input) + { + // Check for edge cases + if (double.IsNaN(input) || double.IsInfinity(input)) + { + throw new ArgumentException("Input value is not a valid number."); + } + if (input < this.MinVal || input >= this.MaxVal) + { + throw new ArgumentException("Input value is outside of the encoder's range."); + } + int NumBuckets = 100; + // Calculate the width of each bucket + double bucketWidth = (this.MaxVal - this.MinVal) / (double)this.NumBuckets; + if (double.IsInfinity(bucketWidth) || double.IsNaN(bucketWidth) || bucketWidth <= 0.0) + { + throw new InvalidOperationException("Bucket width is not valid."); + } + + Console.WriteLine("bucketWidth: " + bucketWidth); + + // Calculate the index of the bucket that the input falls into + int bucketIndex = (int)((input - this.MinVal) / bucketWidth); + Console.WriteLine("bucketIndex: " + bucketIndex); + + // Calculate the lower and upper bounds of the bucket + double bucketLowerBound = bucketIndex * bucketWidth + this.MinVal; + Console.WriteLine("bucketLowerBound: " + bucketLowerBound); + + double bucketUpperBound = (bucketIndex + 1) * bucketWidth + this.MinVal; + Console.WriteLine("bucketUpperBound: " + bucketUpperBound); + + // Return the bucket values + return new double[] { bucketLowerBound, bucketUpperBound }; + } + + + /// + /// Returns an array of binary values representing the mapping of an input value to a set of buckets. + /// + /// The input value to be mapped to buckets. + /// Specifies whether the encoder is periodic or not. + /// The number of buckets to be used for mapping. + /// An array of binary values representing the mapping of the input value to the buckets. 
+ public int[] _getTopDownMapping(double input, bool Periodic, int numBuckets) + { + int[] mapping = new int[numBuckets]; + + // If the encoder is periodic + if (Periodic) + { + // Calculate the width of each bucket + double bucketWidth = 1.0 / numBuckets; + + // Calculate the index of the bucket for the input value + int bucketIndex = (int)Math.Floor(input / bucketWidth); + + // Loop through each bucket + for (int i = 0; i < numBuckets; i++) + { + // Calculate the distance between the input value and the bucket + double dist = Math.Abs(i - bucketIndex) * bucketWidth; + + // Set the mapping value based on the distance + mapping[i] = (dist <= bucketWidth / 2) ? 1 : 0; + } + } + // If the encoder is not periodic + else + { + // Get the maximum and minimum value and the radius from the encoder parameters + double maxVal = MaxVal; + double minVal = MinVal; + double radius = Radius; + + // If the radius is not specified + if (radius == -1) + { + // Calculate the radius based on the number of buckets + radius = (maxVal - minVal) / numBuckets / 2; + } + + // Calculate the width and half-width of each bucket + double bucketWidth = radius * 2; + double halfBucket = bucketWidth / 2.0; + + // Calculate the index of the bucket for the input value + int bucketIndex = (int)Math.Floor((input - minVal + radius) / bucketWidth); + + // Loop through each bucket + for (int i = 0; i < numBuckets; i++) + { + // Calculate the start value of the bucket + double bucketStart = (i * bucketWidth) + minVal - radius; + + // Calculate the distance between the input value and the start of the bucket + double dist = Math.Abs(bucketStart - input); + + // Set the mapping value based on the distance + mapping[i] = (dist <= halfBucket) ? 
1 : 0; + } + } + + // Return the mapping array + return mapping; + } + + } } \ No newline at end of file diff --git a/source/NeoCortexApi/SpatialPooler.cs b/source/NeoCortexApi/SpatialPooler.cs index 5b9e59a23..b439dc033 100644 --- a/source/NeoCortexApi/SpatialPooler.cs +++ b/source/NeoCortexApi/SpatialPooler.cs @@ -131,13 +131,8 @@ public virtual void InitMatrices(Connections conn, DistributedMemory distMem) colList.Add(new KeyPair() { Key = i, Value = new Column(numCells, i, conn.HtmConfig.SynPermConnected, conn.HtmConfig.NumInputs) }); } - Stopwatch sw = new Stopwatch(); - sw.Start(); - conn.Memory.set(colList); - - sw.Stop(); - + //Initialize state meta-management statistics conn.HtmConfig.OverlapDutyCycles = new double[numColumns]; conn.HtmConfig.ActiveDutyCycles = new double[numColumns]; diff --git a/source/NeoCortexUtils/NeoCortexUtils.csproj b/source/NeoCortexUtils/NeoCortexUtils.csproj index 0d453c4bc..34a46e081 100644 --- a/source/NeoCortexUtils/NeoCortexUtils.csproj +++ b/source/NeoCortexUtils/NeoCortexUtils.csproj @@ -1,24 +1,19 @@  - - net5.0 + net8.0 - C:\dev\NeoCortexAPI\neocortexapi\source\NeoCortexUtils\NeoCortexUtils.xml full true - full true - - + - - + \ No newline at end of file diff --git a/source/NeoCortexUtils/docs/ApproveMultiSequenceLearning.md b/source/NeoCortexUtils/docs/ApproveMultiSequenceLearning.md new file mode 100644 index 000000000..280bb24e4 --- /dev/null +++ b/source/NeoCortexUtils/docs/ApproveMultiSequenceLearning.md @@ -0,0 +1,288 @@ +Approve Prediction of Multisequence Learning + +## Introduction +In this project, we have tried to implement new methodologies within the MultisequenceLearning algorithm framework. These methods are designed to autonomously load datasets from a specified path using HelperMethods.ReadDataset(datasetPath). Additionally, we have stored test data in a separate file, which is similarly accessed through HelperMethods.ReadDataset(testsetPath) for subsequent testing of subsequences. 
The function RunMultiSequenceLearningExperiment(sequences, sequencesTest) is employed to process multiple sequences and test subsequences, received respectively as sequences and sequencesTest. Upon completion of the learning phase, the system calculates the accuracy of the predictions made by the algorithm. + +## Implementation + +![image](./images/overview.png) + +Fig: Schematic Diagram of Approval Prediction in Multisequence Learning Architecture + +Above the flow of implementation of our project. + +`Sequence` is the model of how we process and store the dataset. And can be seen below: + +```csharp +public class Sequence +{ + public String name { get; set; } + public int[] data { get; set; } +} +``` + +eg: +- Dataset + +```json +[ + { + "name": "S1", + "data": [ 0, 2, 5, 6, 7, 8, 10, 11, 13 ] + }, + { + "name": "S2", + "data": [ 1, 2, 3, 4, 6, 11, 12, 13, 14 ] + }, + { + "name": "S3", + "data": [ 1, 2, 3, 4, 7, 8, 10, 12, 14 ] + } +] +``` + +- Test Dataset + +```json +[ + { + "name": "T1", + "data": [ 1, 2, 4 ] + }, + { + "name": "T2", + "data": [ 2, 3, 4 ] + }, + { + "name": "T3", + "data": [ 4, 5, 7 ] + }, + { + "name": "T4", + "data": [ 5, 8, 9 ] + } +] + +``` + +Our implemented methods are in `HelperMethod.cs` and can be found [here](../HelperMethods.cs): + +1. 
FetchHTMConfig()
+
+Here we save the HTMConfig which is used for Hierarchical Temporal Memory to `Connections`
+
+```csharp
+///
+/// HTM Config for creating Connections
+///
+/// input bits
+/// number of columns
+/// Object of HTMConfig
+public static HtmConfig FetchHTMConfig(int inputBits, int numColumns)
+{
+    HtmConfig cfg = new HtmConfig(new int[] { inputBits }, new int[] { numColumns })
+    {
+        Random = new ThreadSafeRandom(42),
+
+        CellsPerColumn = 25,
+        GlobalInhibition = true,
+        LocalAreaDensity = -1,
+        NumActiveColumnsPerInhArea = 0.02 * numColumns,
+        PotentialRadius = (int)(0.15 * inputBits),
+        MaxBoost = 10.0,
+        DutyCyclePeriod = 25,
+        MinPctOverlapDutyCycles = 0.75,
+        MaxSynapsesPerSegment = (int)(0.02 * numColumns),
+        ActivationThreshold = 15,
+        ConnectedPermanence = 0.5,
+        PermanenceDecrement = 0.25,
+        PermanenceIncrement = 0.15,
+        PredictedSegmentDecrement = 0.1,
+    };
+
+    return cfg;
+}
+```
+
+All the fields are self-explanatory as per HTM theory.
+
+2. getEncoder()
+
+We have used `ScalarEncoder` since we are encoding all numeric values only.
+
+Remember that `inputBits` is the same as in `HTMConfig`.
+
+```csharp
+///
+/// Get the encoder with settings
+///
+/// input bits
+/// Object of EncoderBase
+public static EncoderBase GetEncoder(int inputBits)
+{
+    double max = 20;
+
+    Dictionary settings = new Dictionary()
+    {
+        { "W", 15},
+        { "N", inputBits},
+        { "Radius", -1.0},
+        { "MinVal", 0.0},
+        { "Periodic", false},
+        { "Name", "scalar"},
+        { "ClipInput", false},
+        { "MaxVal", max}
+    };
+
+    EncoderBase encoder = new ScalarEncoder(settings);
+
+    return encoder;
+}
+```
+
+Note that `MaxValue` for the encoder is set to `20` which can be changed but then this value should be matched while creating the synthetic dataset.
+
+3. 
ReadDataset()
+
+Reads the JSON file when passed as a full path and returns the object of the list of `Sequence`
+
+```csharp
+///
+/// Reads dataset from the file
+///
+/// full path of the file
+/// Object of list of Sequence
+public static List<Sequence> ReadDataset(string path)
+{
+    Console.WriteLine("Reading Sequence...");
+    String lines = File.ReadAllText(path);
+    //var sequence = JsonConvert.DeserializeObject(lines);
+    List<Sequence> sequence = System.Text.Json.JsonSerializer.Deserialize<List<Sequence>>(lines);
+
+    return sequence;
+}
+```
+
+4. CreateDataset()
+
+We have advanced our approach to dataset creation by automating the process, eliminating the need for time-consuming manual intervention. In this improved system, datasets are generated based on specified parameters. These include numberOfSequence, which determines the number of sequences to be created; size, which defines the length of each sequence; and startVal and endVal, which set the starting and ending range values for the sequences, respectively. This streamlined method enhances efficiency and accuracy in dataset generation.
+
+```csharp
+///
+/// Creates a list of Sequence as per configuration
+///
+/// Object of list of Sequence
+public static List<Sequence> CreateDataset()
+{
+    int numberOfSequence = 3;
+    int size = 12;
+    int startVal = 0;
+    int endVal = 15;
+    Console.WriteLine("Creating Sequence...");
+    List<Sequence> sequence = HelperMethods.CreateSequences(numberOfSequence, size, startVal, endVal);
+
+    return sequence;
+}
+```
+
+Note that `endVal` should be less than or equal to `MaxVal` of `ScalarEncoder` used above
+
+5. SaveDataset()
+
+Saves the dataset in the `dataset` directory of the `BasePath` of the application where it is running.
+ +```csharp +/// +/// Saves the dataset in the 'dataset' folder in BasePath of the application +/// +/// Object of list of Sequence +/// Full path of the dataset +public static string SaveDataset(List sequences) +{ + string BasePath = AppDomain.CurrentDomain.BaseDirectory; + string reportFolder = Path.Combine(BasePath, "dataset"); + if (!Directory.Exists(reportFolder)) + Directory.CreateDirectory(reportFolder); + string reportPath = Path.Combine(reportFolder, $"dataset_{DateTime.Now.Ticks}.json"); + + Console.WriteLine("Saving dataset..."); + + if (!File.Exists(reportPath)) + { + using (StreamWriter sw = File.CreateText(reportPath)) + { + sw.WriteLine(JsonConvert.SerializeObject(sequences)); + } + } + + return reportPath; +} +``` + +6. Calculating accuracy in PredictNextElement() in `Program.cs` + +![image](./images/approve_prediction.png) + +Fig: Predictions and calculating accuracy + +```csharp +int matchCount = 0; +int predictions = 0; +double accuracy = 0.0; + +foreach (var item in list) +{ + Predict(); + //compare current element with prediction of previous element + if(item == Int32.Parse(prediction.Last())) + { + matchCount++; + } + predictions++; + accuracy = (double)matchCount / predictions * 100; +} +``` + +Note that the prediction code is omitted. + +## How to run the project + +### To create a synthetic dataset + +1. Open the [sln](../../../NeoCortexApi.sln) and select `MultiSequenceLearning` as startup project. + +2. In `Program.cs` we have the `Main()`. Uncomment the below code to create a synthetic dataset. + +```csharp +//to create a synthetic dataset +string path = HelperMethods.SaveDataset(HelperMethods.CreateDataset()); +Console.WriteLine($"Dataset saved: {path}"); +``` + +*and comment rest of the lines*. + +3. Run to create the dataset and save the path of the dataset folder and name. + +![dataset](./images/dataset.jpg) + +### To run the experiment + +1. 
Open the [NeoCortexApi.sln](../../../NeoCortexApi.sln) and select `MultiSequenceLearning` as startup project. + +2. In `Program.cs` we have the `Main()`. Change the name of `dataset` file saved from previous run as seen below: + +```csharp +//to read dataset +string BasePath = AppDomain.CurrentDomain.BaseDirectory; +string datasetPath = Path.Combine(BasePath, "dataset", "dataset_03.json"); //edit name of dataset here +Console.WriteLine($"Reading Dataset: {datasetPath}"); +List sequences = HelperMethods.ReadDataset(datasetPath); +``` + +and also *copy the [test data](../dataset/test_01.json) to the folder* (`{BASEPATH}\neocortexapi\source\MySEProject\MultiSequenceLearning\bin\Debug\net6.0\dataset`). + +## Results + +We have experimented to the maximum feasible extent, utilizing a variety of datasets. To accommodate the considerable execution time, we deliberately limited the size of the datasets and maintained the sequences at a minimal length. + +![results](./images/result.png) diff --git a/source/NeoCortexUtils/docs/images/approve_prediction.png b/source/NeoCortexUtils/docs/images/approve_prediction.png new file mode 100644 index 000000000..7eae60aa7 Binary files /dev/null and b/source/NeoCortexUtils/docs/images/approve_prediction.png differ diff --git a/source/NeoCortexUtils/docs/images/dataset.jpg b/source/NeoCortexUtils/docs/images/dataset.jpg new file mode 100644 index 000000000..675cffbee Binary files /dev/null and b/source/NeoCortexUtils/docs/images/dataset.jpg differ diff --git a/source/NeoCortexUtils/docs/images/overview.png b/source/NeoCortexUtils/docs/images/overview.png new file mode 100644 index 000000000..28c20e550 Binary files /dev/null and b/source/NeoCortexUtils/docs/images/overview.png differ diff --git a/source/NeoCortexUtils/docs/images/result.png b/source/NeoCortexUtils/docs/images/result.png new file mode 100644 index 000000000..019ad9bcb Binary files /dev/null and b/source/NeoCortexUtils/docs/images/result.png differ diff --git 
a/source/NeoCortexUtils/docs/implement-sdr-representation-samples.md b/source/NeoCortexUtils/docs/working-with-sdrs.md similarity index 86% rename from source/NeoCortexUtils/docs/implement-sdr-representation-samples.md rename to source/NeoCortexUtils/docs/working-with-sdrs.md index 0a3d32c17..b7121ef0f 100644 --- a/source/NeoCortexUtils/docs/implement-sdr-representation-samples.md +++ b/source/NeoCortexUtils/docs/working-with-sdrs.md @@ -1,27 +1,28 @@ -# Implement SDR representation samples +# working-with-sdrs ## Introduction: -Neural network is a focal element in the area of machine learning. Inspired from the biological neurons that are present in the human brain, an artificial neural network is designed which mimics human brain’s behavior, helping computer programs to identify patterns and answers to these related issues. It would be able to perform actions like the human brain and has the capability of learning things. These neural networks work on the principle of learning input/output operations. In our project, SDR representation has been implemented in a variety of ways, including SDR as indices and bitmaps. Furthermore, we developed methods for comparing two SDRs by using intersection, union, and overlap. In addition, we have added a new representation of Spatial pooler learning as a "Column/Overlap" ratio, which is a another representation as heatmap. +Neural networks represent a cornerstone in the field of machine learning, drawing inspiration from the biological neurons in the human brain. These artificial neural networks are engineered to emulate the brain's ability to recognize patterns and solve complex problems. Central to this capability is their potential to learn and perform tasks akin to human cognition, grounded in the principle of learning from input/output operations. -The inputs that we are using are scalar values and images. We specifed how these inputs are converted to SDR. 
Furthermore, this procedure of SDR representations involves the use of Encoders, Spatial Pooler (SP) and Temporal Memory (TM). Encoders are the basic components used in this network, which takes human justifiable information as input data i.e. (image, scalar value) and changes it to machine readable format, binary array with n size. SP uses these encoded binary arrays from encoders as an input for generation of SDRs. +In our project, we have explored various implementations of Sparse Distributed Representation (SDR), including using SDRs as indices and bitmaps. Our methods for comparing SDRs involve techniques like intersection, union, and overlap calculations. Additionally, we've introduced a novel concept: representing Spatial Pooler learning through a "Column/Overlap" ratio, akin to a heatmap representation. -TM is used to learn the sequence of these generated SDR’s which are given as an input from Spatial Pooler (SP). +The inputs for our neural network are scalar values and images. We have detailed the process of converting these inputs into SDRs, which is a crucial step in our methodology. This conversion involves several key components: Encoders, the Spatial Pooler (SP), and Temporal Memory (TM). Encoders serve as the initial processing unit, transforming human-interpretable data (such as images or scalar values) into a binary array format that is machine-readable. The Spatial Pooler then takes these encoded arrays and generates SDRs. +Finally, Temporal Memory plays a pivotal role in learning the sequences of these SDRs, which are fed from the Spatial Pooler. This learning process is fundamental in enabling the neural network to understand and predict patterns in the data, a critical aspect of machine learning. #### What is an SDR: -According to recent research in neuroscience, our brain uses SDRs to process information. SDRs are the binary representation of data which is approximatly 2% of bits which are active. In SDRs, each bit has a meaning i.e. 
the active bits in the same places of two different vectors make them semantically similar. By comparing SDRs of different samples, the similarity between them can be estimated. For storing the SDRs, a list of indices of active bits are kept which saves a lot of space. +According to recent research in neuroscience, our brain uses SDRs to process information. SDRs are the binary representation of data which is approximately 2% of bits that are active. In SDRs, each bit has a meaning i.e. the active bits in the same places of two different vectors make them semantically similar. By comparing SDRs of different samples, the similarity between them can be estimated. For storing the SDRs, a list of indices of active bits is kept which saves a lot of space. #### Functions dealing with SDRs: -SDRs are generated by passing an input to the encoder. Input can be anything like a number, time/date or an image. The type of encoder for each input is different. For example if our input is date and time we have to use DateTime Encoder and if our input is a number we use scaler encoder to encode it and represent it in form of SDRs. -It is sometimes important to visualise the SDRs in different forms and make comparsions between them for a better understnding of the learning process. We are going to show you how to output SDRs as: +SDRs are generated by passing an input to the encoder. Input can be anything like a number, time/date, or an image. The type of encoder for each input is different. For example, if our input is date and time we have to use DateTime Encoder, and if our input is a number we use a scaler encoder to encode it and represent it in the form of SDRs. +It is sometimes important to visualize the SDRs in different forms and make comparisons between them for a better understanding of the learning process. 
We are going to show you how to output SDRs as: - Indices - Bitmaps - Heatmaps -Other than the representations, we will show you comparisons of different inputs by using overlap and instersection functions to see how different sdrs are similar or distinct semantically. +Other than the representations, we will show you comparisons of different inputs by using overlap and instersection functions to see how different SDRs are similar or distinct semantically. The functions which are used for the SDR representation are listed: @@ -32,7 +33,7 @@ The functions which are used for the SDR representation are listed: ## How to output SDR as Indices: -SDR's are in the form of 0's & 1's, we have to look up for 1's everytime in an SDR array. So for simplification we just find the index number where SDR is 1. +SDRs are in the form of 0's & 1's, we have to look up for 1's every time in an SDR array. So for simplification, we just find the index number where SDR is 1. SDR as Indices is the returning of the index number where SDRs bits are 1 of a binary array. If you like to visualise SDRs as indices, you can do it by using this function `NeoCortexApi.Helpers.StringifyVector` to get indices of the active (1) bits from the binary array. This function receives the encoded SDR and returns the index numbers of the SDR array where it is active. @@ -51,13 +52,13 @@ Code URL for the SDR Indices: https://github.com/UniversityOfAppliedSciencesFrankfurt/se-cloud-2020-2021/blob/39475d910dd1bdbedc5e83be8bc72eefdd1259c7/MyProject/ML5.8SDRsRepresentations/OutputSDRs/Program.cs#L62 -## How to output SDR as bitmap: +## How to output SDR as a bitmap: #### What is a Bitmap: -Bitmap is a type of file format which is used to store images. A bitmap is a spatially mapped array of bits i.e. a map of bits. For the purpose of representing SDRs as bitmaps, we first feed the output of encoders as inputs to the SP. #### Heatmap Function: +Bitmap is a type of file format which is used to store images. 
A bitmap is a spatially mapped array of bits i.e. a map of bits. To represent SDRs as bitmaps, we first feed the output of encoders as inputs to the SP. #### Heatmap Function: #### Bitmap Function: -You can visualise Bitmaps using the function `NeoCortexUtils.DrawBitmap()`. We will show you simply how it works. In the first step, you need to convert a one dimensional binary array into two-dimensional binary array using predefined function `ArrayUtils.Make2DArray` as it is required for the conversion into bitmpas. Bitmaps are then generated from the 2D Array with or without a Transpose function `ArrayUtils.Transpose()`. Transpose function is used just to set the orientation. You can skip this part if you like. +You can visualize Bitmaps using the function `NeoCortexUtils.DrawBitmap()`. We will show you simply how it works. In the first step, you need to convert a one-dimensional binary array into the two-dimensional binary array using the predefined function `ArrayUtils.Make2DArray` as it is required for the conversion into bitmaps. Bitmaps are then generated from the 2D Array with or without a Transpose function `ArrayUtils.Transpose()`. A transpose function is used just to set the orientation. You can skip this part if you like. The following is a code snippet for the functions mentioned above: @@ -68,9 +69,9 @@ twoDimenArray = ArrayUtils.Transpose(twoDimenArray); NeoCortexUtils.DrawBitmap(twoDimArray1, 1024, 1024, $"{outFolder}\\Overlap_{sdrs.Count}.png", Color.PaleGreen, Color.Red, text: $"Overlap.png"); ``` -Now we would like to explain what is going on here. The `Array` of first argument is a one dimensional binary array, the second argument `(int)Math.Sqrt(Array.Length)` outputs the value of rows and the third argument `(int)Math.Sqrt(Array.Length)` gives the column values. In order to set the orientation of the bitmap image, we additionally use `ArrayUtils.Transpose()` function. 
The `twoDimArray` is passed to the `NeoCortexUtils.DrawBitmap` function for the representation of SDR as a bitmap image. +Now we would like to explain what is going on here. The `Array` of the first argument is a one-dimensional binary array, the second argument `(int)Math.Sqrt(Array.Length)` outputs the value of rows, and the third argument `(int)Math.Sqrt(Array.Length)` gives the column values. To set the orientation of the bitmap image, we additionally use `ArrayUtils.Transpose()` function. The `twoDimArray` is passed to the `NeoCortexUtils.DrawBitmap` function for the representation of SDR as a bitmap image. -`twoDimArray1` is the SDR representation in binary form which is to be plotted. `1024 , 1024` shows the size of the final bitmap image. You can adjust it based on your requirements. `$"{outFolder}\\Overlap_{sdrs.Count}.png"` shows the path where output image is going to be saved. `Color.PaleGreen, Color.Red` here you can set the colors of the On and Off bits. In my case, PaleGreen is for Off bits and Red is for On bits. `text: $"Overlap.png"` is it the text which is going to be visible on the top left of the image. +`twoDimArray1` is the SDR representation in binary form which is to be plotted. `1024 , 1024` shows the size of the final bitmap image. You can adjust it based on your requirements. `$"{outFolder}\\Overlap_{sdrs.Count}.png"` shows the path where the output image is going to be saved. `Color.PaleGreen, Color.Red` here you can set the colors of the On and Off bits. In my case, PaleGreen is for Off bits and Red is for On bits. `text: $"Overlap.png"` is the text which is going to be visible on the top left of the image. 
@@ -85,17 +86,17 @@ The following images are the output of the function `NeoCortexUtils.DrawBitmap() [img4]: https://user-images.githubusercontent.com/59200478/113517520-e3d73980-9599-11eb-956d-c299a5b452f6.png [img5]: https://user-images.githubusercontent.com/59200478/113517529-efc2fb80-9599-11eb-9eb7-90bbc97d42d5.png -URL of SDR code as bitmap : +URL of SDR code as a bitmap : https://github.com/UniversityOfAppliedSciencesFrankfurt/se-cloud-2020-2021/blob/39475d910dd1bdbedc5e83be8bc72eefdd1259c7/MyProject/ML5.8SDRsRepresentations/OutputSDRs/Program.cs#L109 ## How to output SDR as Heatmap: #### What is a Heatmap: -Sometimes is useful to see the activation of all cells. Some of them might be very active but still didn't reach the activation threshold. In such cases, the heatmap (URL) can be used. Heatmap is a graphical representation of data which uses a color coding system for representing different activation thresholds. +Sometimes is useful to see the activation of all cells. Some of them might be very active but still didn't reach the activation threshold. In such cases, the heatmap (URL) can be used. A heatmap is a graphical representation of data that uses a color coding system for representing different activation thresholds. #### Heatmap Function: -If you are looking to create a heatmap of an SDR learning. you can use `DrawHeatmaps` function but with some minor changes in the arguments. A heatmap representation of SDRs will depict the active columns and categorize them in different colors based on their usage during learning. How we are using the `DrawHeatmaps` function for this purpose you can see the arguments we have used to generate heatmaps down below. A code snippet is shown for the creation of heatmaps: +If you are looking to create a heatmap of an SDR learning. you can use the `DrawHeatmaps` function but with some minor changes in the arguments. 
A heatmap representation of SDRs will depict the active columns and categorize them in different colors based on their usage during learning. How we are using the `DrawHeatmaps` function for this purpose you can see the arguments we have used to generate heatmaps down below. A code snippet is shown for the creation of heatmaps: ```csharp public static void DrawHeatmaps(List twoDimArrays, String filePath, @@ -104,7 +105,7 @@ If you are looking to create a heatmap of an SDR learning. you can use `DrawHeat decimal redStart = 200, decimal yellowMiddle = 127, decimal greenStart = 20) ``` -Now here `decimal redStart = 200, decimal yellowMiddle = 127, decimal greenStart = 20` you can set the activation threshold of any cell with its color. In my case, red color is for the cell which are activated more than or equal to 200 times. Yellow color is for the cells which are activated more than or equal to 127 times. And green cells are those who are activated for 20 time or more. +Now here `decimal redStart = 200, decimal yellowMiddle = 127, decimal greenStart = 20` you can set the activation threshold of any cell with its color. In my case, the red color is for the cells that are activated more than or equal to 200 times. The yellow color is for the cells that are activated more than or equal to 127 times. Green cells are those that are activated 20 times or more. 
For the representation of SDRs as heatmaps, we took the following images which are the SDRs generated by encoders: diff --git a/source/NeocortexApiLLMSample/Entities/TokenTable.cs b/source/NeocortexApiLLMSample/Entities/TokenTable.cs new file mode 100644 index 000000000..37bd8dbb9 --- /dev/null +++ b/source/NeocortexApiLLMSample/Entities/TokenTable.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NeocortexApiLLMSample.Entities +{ + public class TokenTable + { + } +} diff --git a/source/NeocortexApiLLMSample/Interfaces/ICorpusLoader.cs b/source/NeocortexApiLLMSample/Interfaces/ICorpusLoader.cs new file mode 100644 index 000000000..006db83b9 --- /dev/null +++ b/source/NeocortexApiLLMSample/Interfaces/ICorpusLoader.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NeocortexApiLLMSample.Interfaces +{ + public interface ICorpusLoader + { + /// + /// Loads the corpus data from the given bookmark. + /// + /// Specifies ehere to start loading. + /// Return the bookmark, that might be used at the next call. + Task Load(string bookmark); + + } +} diff --git a/source/NeocortexApiLLMSample/Interfaces/ITokenSimilarity.cs b/source/NeocortexApiLLMSample/Interfaces/ITokenSimilarity.cs new file mode 100644 index 000000000..6f80fc746 --- /dev/null +++ b/source/NeocortexApiLLMSample/Interfaces/ITokenSimilarity.cs @@ -0,0 +1,23 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NeocortexApiLLMSample.Interfaces +{ + /// + /// Defines the used to calculate similarity between tokens. + /// + public interface ITokenSimilarity + { + /// + /// Calculates the similarity between two words. It does not use embeddings or similar technique. + /// + /// Any kind of token like syllable. 
+ /// + /// + int CalcSimilarity(string token1, string token2); + + } +} diff --git a/source/NeocortexApiLLMSample/LevensteinSimilarity.cs b/source/NeocortexApiLLMSample/LevensteinSimilarity.cs new file mode 100644 index 000000000..6d04ea987 --- /dev/null +++ b/source/NeocortexApiLLMSample/LevensteinSimilarity.cs @@ -0,0 +1,43 @@ +using NeocortexApiLLMSample.Interfaces; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NeocortexApiLLMSample +{ + internal class LevensteinSimilarity : ITokenSimilarity + { + public int CalcSimilarity(string word1, string word2) + { + int m = word1.Length; + int n = word2.Length; + + // Create a 2D matrix to store distances + int[,] dp = new int[m + 1, n + 1]; + + // Initialize the matrix + for (int i = 0; i <= m; ++i) + dp[i, 0] = i; + + for (int j = 0; j <= n; ++j) + dp[0, j] = j; + + // Fill the matrix + for (int i = 1; i <= m; ++i) + { + for (int j = 1; j <= n; ++j) + { + if (word1[i - 1] == word2[j - 1]) + dp[i, j] = dp[i - 1, j - 1]; + else + dp[i, j] = 1 + Math.Min(Math.Min(dp[i - 1, j], dp[i, j - 1]), dp[i - 1, j - 1]); + } + } + + return dp[m, n]; + } + } + +} diff --git a/source/NeocortexApiLLMSample/NeocortexApiLLMSample.csproj b/source/NeocortexApiLLMSample/NeocortexApiLLMSample.csproj new file mode 100644 index 000000000..2150e3797 --- /dev/null +++ b/source/NeocortexApiLLMSample/NeocortexApiLLMSample.csproj @@ -0,0 +1,10 @@ + + + + Exe + net8.0 + enable + enable + + + diff --git a/source/NeocortexApiLLMSample/Program.cs b/source/NeocortexApiLLMSample/Program.cs new file mode 100644 index 000000000..8936f797f --- /dev/null +++ b/source/NeocortexApiLLMSample/Program.cs @@ -0,0 +1,16 @@ +using NeocortexApiLLMSample.Interfaces; + +namespace NeocortexApiLLMSample +{ + internal class Program + { + static async Task Main(string[] args) + { + Console.WriteLine("Hello, LLM!"); + + ICorpusLoader loader = new TextFileCorpusLoader("corpus.txt"); + + 
await loader.Load("bookmark"); + } + } +} diff --git a/source/NeocortexApiLLMSample/TextFileCorpusLoader.cs b/source/NeocortexApiLLMSample/TextFileCorpusLoader.cs new file mode 100644 index 000000000..570674eb4 --- /dev/null +++ b/source/NeocortexApiLLMSample/TextFileCorpusLoader.cs @@ -0,0 +1,73 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading.Tasks; + +namespace NeocortexApiLLMSample +{ + public class TextFileCorpusLoader : Interfaces.ICorpusLoader + { + private readonly string _file; + + private Dictionary _words = new Dictionary(); + + public TextFileCorpusLoader(string fileName) + { + if (string.IsNullOrEmpty(fileName)) + { + throw new ArgumentException("File name must not be null or empty.", nameof(fileName)); + } + + _file = fileName; + } + + public async Task Load(string bookmark) + { + using (StreamReader sr = new StreamReader(_file)) + { + string line = await sr.ReadLineAsync()!; + + if (line != null) + { + var words = ExtractWords(line); + } + } + + // We do not support bookmaring right now. + return null; + } + + private static List ExtractWords(string input) + { + // Use a regular expression to match word boundaries + // \b asserts a word boundary + // \w+ matches one or more word characters (alphanumeric or underscore) + MatchCollection matches = Regex.Matches(input, @"\b\w+\b"); + + // Convert MatchCollection to a List + List words = new List(); + foreach (Match match in matches) + { + words.Add(match.Value); + } + + return words; + } + + public IEnumerator GetEnumerator() + { + using (StreamReader sr = new StreamReader(_file)) + { + string? 
line = sr.ReadLine(); + while (line != null) + { + yield return line; + line = sr.ReadLine(); + } + } + } + } + +} diff --git a/source/Samples/ApproveMultiSequenceLearning/ApproveMultiSequenceLearning.csproj b/source/Samples/ApproveMultiSequenceLearning/ApproveMultiSequenceLearning.csproj new file mode 100644 index 000000000..9835eb800 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/ApproveMultiSequenceLearning.csproj @@ -0,0 +1,39 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + + + + + + Always + + + Always + + + Always + + + Always + + + Always + + + + diff --git a/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_01.json b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_01.json new file mode 100644 index 000000000..fd4a66239 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_01.json @@ -0,0 +1 @@ +[{"name":"S1","data":[0,2,5,7,8,11,13,14,17,21,23,24,25,26,27,28,29]},{"name":"S2","data":[0,1,2,3,4,6,7,9,14,15,17,22,23,24,25,27,28]},{"name":"S3","data":[2,3,5,9,10,11,12,13,17,19,20,21,23,25,26,27,29]}] diff --git a/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_02.json b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_02.json new file mode 100644 index 000000000..c694ac433 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_02.json @@ -0,0 +1 @@ +[{"name":"S1","data":[0,2,4,5,6,7,8,9,11,13,14,15,16,19]},{"name":"S2","data":[0,1,2,4,5,10,12,13,14,15,16,17,18,19]},{"name":"S3","data":[0,1,3,4,6,7,8,10,11,13,16,17,18,19]}] diff --git a/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_03.json b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_03.json new file mode 100644 index 000000000..6698e3ffe --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_03.json @@ -0,0 +1 @@ 
+[{"name":"S1","data":[0,1,4,5,6,7,8,13,14]},{"name":"S2","data":[0,1,2,3,4,5,7,8,11]},{"name":"S3","data":[0,1,2,3,4,5,6,9,13]},{"name":"S4","data":[1,3,5,8,9,10,11,12,14]},{"name":"S5","data":[0,3,4,6,7,10,12,13,14]},{"name":"S6","data":[2,3,4,5,7,8,9,10,14]},{"name":"S7","data":[0,2,3,4,5,6,8,10,13]},{"name":"S8","data":[1,2,4,5,6,10,11,12,13]},{"name":"S9","data":[0,1,2,3,4,8,10,12,13]},{"name":"S10","data":[0,1,2,4,5,6,7,9,11]},{"name":"S11","data":[2,3,4,9,10,11,12,13,14]},{"name":"S12","data":[0,2,3,4,5,6,7,12,13]},{"name":"S13","data":[1,2,4,6,8,9,11,12,14]},{"name":"S14","data":[2,4,6,8,9,10,11,12,13]},{"name":"S15","data":[0,1,3,6,8,9,10,13,14]},{"name":"S16","data":[1,2,3,8,9,10,11,12,14]},{"name":"S17","data":[0,1,3,4,6,7,9,10,13]},{"name":"S18","data":[1,3,5,6,8,9,10,12,13]},{"name":"S19","data":[1,2,4,5,6,10,11,12,14]},{"name":"S20","data":[0,2,4,5,6,9,10,13,14]},{"name":"S21","data":[0,4,5,6,7,8,11,13,14]},{"name":"S22","data":[0,3,5,6,7,8,9,10,13]},{"name":"S23","data":[0,1,2,3,5,7,9,10,13]},{"name":"S24","data":[0,3,4,6,8,9,10,13,14]},{"name":"S25","data":[0,1,2,3,5,8,10,12,13]},{"name":"S26","data":[0,3,5,6,8,9,10,12,13]},{"name":"S27","data":[1,3,4,5,6,8,11,12,14]},{"name":"S28","data":[0,3,5,6,7,9,11,12,13]},{"name":"S29","data":[0,2,3,4,5,6,11,12,14]},{"name":"S30","data":[0,1,2,3,4,6,8,9,12]}] diff --git a/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_04.json b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_04.json new file mode 100644 index 000000000..9ec97ca11 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/DataSet/dataset_04.json @@ -0,0 +1 @@ +[{"name":"S1","data":[0,2,5,6,7,8,10,11,13]},{"name":"S2","data":[1,2,3,4,6,11,12,13,14]},{"name":"S3","data":[1,2,3,4,7,8,10,12,14]}] diff --git a/source/Samples/ApproveMultiSequenceLearning/DataSet/test_01.json b/source/Samples/ApproveMultiSequenceLearning/DataSet/test_01.json new file mode 100644 index 000000000..3785c6d06 --- /dev/null +++ 
b/source/Samples/ApproveMultiSequenceLearning/DataSet/test_01.json @@ -0,0 +1,18 @@ +[ + { + "name": "T1", + "data": [ 5, 6, 7, 8 ] + }, + { + "name": "T2", + "data": [ 6, 11, 12, 13 ] + }, + { + "name": "T3", + "data": [ 1, 2, 3, 4 ] + }, + { + "name": "T4", + "data": [ 3, 4, 7, 8, 10 ] + } +] diff --git a/source/Samples/ApproveMultiSequenceLearning/HelperMethods.cs b/source/Samples/ApproveMultiSequenceLearning/HelperMethods.cs new file mode 100644 index 000000000..acaa4da22 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/HelperMethods.cs @@ -0,0 +1,142 @@ +using System; +using System.Globalization; +using System.IO; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using NeoCortexApi; +using NeoCortexApi.Encoders; +using NeoCortexApi.Entities; +using MultiSequenceLearning; +using Newtonsoft.Json; + +namespace MultiSequenceLearning +{ + public class HelperMethods + { + // Constants for default settings + private const int DefaultRandomSeed = 42; + private const double MaxScalarValue = 20.0; + + // Avoid magic numbers in your code + private const int DefaultCellsPerColumn = 25; + private const double DefaultGlobalInhibitionDensity = 0.02; + private const double DefaultPotentialRadiusFactor = 0.15; + private const double DefaultMaxSynapsesPerSegmentFactor = 0.02; + private const double DefaultMaxBoost = 10.0; + private const int DefaultDutyCyclePeriod = 25; + private const double DefaultMinPctOverlapDutyCycles = 0.75; + private const int DefaultActivationThreshold = 15; + private const double DefaultConnectedPermanence = 0.5; + private const double DefaultPermanenceDecrement = 0.25; + private const double DefaultPermanenceIncrement = 0.15; + private const double DefaultPredictedSegmentDecrement = 0.1; + + /// + /// HTM Config for creating Connections + /// + public static HtmConfig FetchHTMConfig(int inputBits, int numColumns) + { + return new HtmConfig(new int[] { inputBits }, new int[] { 
numColumns }) + { + Random = new ThreadSafeRandom(DefaultRandomSeed), + CellsPerColumn = DefaultCellsPerColumn, + GlobalInhibition = true, + LocalAreaDensity = -1, + NumActiveColumnsPerInhArea = DefaultGlobalInhibitionDensity * numColumns, + PotentialRadius = (int)(DefaultPotentialRadiusFactor * inputBits), + MaxBoost = DefaultMaxBoost, + DutyCyclePeriod = DefaultDutyCyclePeriod, + MinPctOverlapDutyCycles = DefaultMinPctOverlapDutyCycles, + MaxSynapsesPerSegment = (int)(DefaultMaxSynapsesPerSegmentFactor * numColumns), + ActivationThreshold = DefaultActivationThreshold, + ConnectedPermanence = DefaultConnectedPermanence, + PermanenceDecrement = DefaultPermanenceDecrement, + PermanenceIncrement = DefaultPermanenceIncrement, + PredictedSegmentDecrement = DefaultPredictedSegmentDecrement + }; + } + + /// + /// Get the encoder with settings + /// + public static EncoderBase GetEncoder(int inputBits) + { + var settings = new Dictionary + { + { "W", 15 }, + { "N", inputBits }, + { "Radius", -1.0 }, + { "MinVal", 0.0 }, + { "Periodic", false }, + { "Name", "scalar" }, + { "ClipInput", false }, + { "MaxVal", MaxScalarValue } + }; + + return new ScalarEncoder(settings); + } + + /// + /// Reads dataset from the file + /// + public static List ReadDataset(string path) + { + Console.WriteLine("Reading Sequence..."); + try + { + string fileContent = File.ReadAllText(path); + return JsonConvert.DeserializeObject>(fileContent); + } + catch (Exception ex) + { + Console.WriteLine($"Failed to read the dataset: {ex.Message}"); + return new List(); // Return an empty list in case of failure + } + } + + /// + /// Saves the dataset in 'dataset' folder in BasePath of application + /// + public static string SaveDataset(List sequences) + { + string basePath = AppDomain.CurrentDomain.BaseDirectory; + string datasetFolder = Path.Combine(basePath, "dataset"); + Directory.CreateDirectory(datasetFolder); // CreateDirectory is safe to call if directory exists + string datasetPath = 
Path.Combine(datasetFolder, $"dataset_{DateTime.Now.Ticks}.json"); + + Console.WriteLine("Saving dataset..."); + File.WriteAllText(datasetPath, JsonConvert.SerializeObject(sequences)); + return datasetPath; + } + + /// + /// Creates multiple sequences as per parameters + /// + public static List CreateSequences(int count, int size, int startVal, int stopVal) + { + return Enumerable.Range(1, count).Select(i => + new Sequence + { + name = $"S{i}", + data = GenerateRandomSequence(size, startVal, stopVal) + }) + .ToList(); + } + + private static int[] GenerateRandomSequence(int size, int startVal, int stopVal) + { + var rnd = new Random(); + var sequence = new HashSet(); + + while (sequence.Count < size) + { + int number = rnd.Next(startVal, stopVal + 1); + sequence.Add(number); + } + + return sequence.OrderBy(n => n).ToArray(); + } + } +} \ No newline at end of file diff --git a/source/Samples/ApproveMultiSequenceLearning/MultiSequenceLearning.csproj b/source/Samples/ApproveMultiSequenceLearning/MultiSequenceLearning.csproj new file mode 100644 index 000000000..f1e8cb154 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/MultiSequenceLearning.csproj @@ -0,0 +1,20 @@ + + + + Exe + net6.0 + enable + enable + + + + + + + + + + + + + diff --git a/source/Samples/ApproveMultiSequenceLearning/MultisequenceLearning.cs b/source/Samples/ApproveMultiSequenceLearning/MultisequenceLearning.cs new file mode 100644 index 000000000..6759ae060 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/MultisequenceLearning.cs @@ -0,0 +1,321 @@ +using System; +using System.Globalization; +using System.IO; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using NeoCortexApi; +using NeoCortexApi.Classifiers; +using NeoCortexApi.Encoders; +using NeoCortexApi.Entities; +using NeoCortexApi.Classifiers; +using NeoCortexApi.Network; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using 
System.Linq; + + +namespace MultiSequenceLearning +{ + /// + /// Implements an experiment that demonstrates how to learn sequences. + /// + public class MultiSequenceLearning + { + /// + /// Runs the learning of sequences. + /// + /// Dictionary of sequences. KEY is the sewuence name, the VALUE is th elist of element of the sequence. + public Predictor Run(List sequences) + { + Console.WriteLine($"Hello NeocortexApi! Experiment {nameof(MultiSequenceLearning)}"); + + int inputBits = 100; + int numColumns = 1024; + + HtmConfig cfg = HelperMethods.FetchHTMConfig(inputBits, numColumns); + + EncoderBase encoder = HelperMethods.GetEncoder(inputBits); + + return RunExperiment(inputBits, cfg, encoder, sequences); + } + + /// + /// + /// + private Predictor RunExperiment(int inputBits, HtmConfig cfg, EncoderBase encoder, List sequences) + { + Stopwatch sw = new Stopwatch(); + sw.Start(); + + int maxMatchCnt = 0; + + var mem = new Connections(cfg); + + bool isInStableState = false; + + HtmClassifier cls = new HtmClassifier(); + + var numUniqueInputs = GetNumberOfInputs(sequences); + + CortexLayer layer1 = new CortexLayer("L1"); + + TemporalMemory tm = new TemporalMemory(); + + Console.WriteLine("------------ START ------------"); + + // For more information see following paper: https://www.scitepress.org/Papers/2021/103142/103142.pdf + HomeostaticPlasticityController hpc = new HomeostaticPlasticityController(mem, numUniqueInputs * 150, (isStable, numPatterns, actColAvg, seenInputs) => + { + if (isStable) + // Event should be fired when entering the stable state. + Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}"); + else + // Ideal SP should never enter unstable state after stable state. + Debug.WriteLine($"INSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}"); + + // We are not learning in instable state. 
+ isInStableState = isStable; + + // Clear active and predictive cells. + //tm.Reset(mem); + }, numOfCyclesToWaitOnChange: 50); + + + SpatialPoolerMT sp = new SpatialPoolerMT(hpc); + sp.Init(mem); + tm.Init(mem); + + // Please note that we do not add here TM in the layer. + // This is omitted for practical reasons, because we first eneter the newborn-stage of the algorithm + // In this stage we want that SP get boosted and see all elements before we start learning with TM. + // All would also work fine with TM in layer, but it would work much slower. + // So, to improve the speed of experiment, we first ommit the TM and then after the newborn-stage we add it to the layer. + layer1.HtmModules.Add("encoder", encoder); + layer1.HtmModules.Add("sp", sp); + + //double[] inputs = inputValues.ToArray(); + int[] prevActiveCols = new int[0]; + + int cycle = 0; + int matches = 0; + + var lastPredictedValues = new List(new string[] { "0" }); + + int maxCycles = 3500; + + // + // Training SP to get stable. New-born stage. + // + + for (int i = 0; i < maxCycles && isInStableState == false; i++) + { + matches = 0; + + cycle++; + + Debug.WriteLine($"-------------- Newborn SP Cycle {cycle} ---------------"); + Console.WriteLine($"-------------- Newborn SP Cycle {cycle} ---------------"); + + foreach (var inputs in sequences) + { + foreach (var input in inputs.data) + { + Debug.WriteLine($" -- {inputs.name} - {input} --"); + + var lyrOut = layer1.Compute(input, true); + + if (isInStableState) + break; + } + + if (isInStableState) + break; + } + } + + // Clear all learned patterns in the classifier. + cls.ClearState(); + + // We activate here the Temporal Memory algorithm. + layer1.HtmModules.Add("tm", tm); + + // + // Loop over all sequences. 
+ foreach (var sequenceKeyPair in sequences) + { + Debug.WriteLine($"-------------- Sequences {sequenceKeyPair.name} ---------------"); + Console.WriteLine($"-------------- Sequences {sequenceKeyPair.name} ---------------"); + + int maxPrevInputs = sequenceKeyPair.data.Length - 1; + + List previousInputs = new List(); + + previousInputs.Add("-1"); + + // + // Now training with SP+TM. SP is pretrained on the given input pattern set. + for (int i = 0; i < maxCycles; i++) + { + matches = 0; + + cycle++; + + Debug.WriteLine(""); + + Debug.WriteLine($"-------------- Cycle SP+TM{cycle} ---------------"); + Console.WriteLine($"-------------- Cycle SP+TM {cycle} ---------------"); + + foreach (var input in sequenceKeyPair.data) + { + Debug.WriteLine($"-------------- {input} ---------------"); + + var lyrOut = layer1.Compute(input, true) as ComputeCycle; + + var activeColumns = layer1.GetResult("sp") as int[]; + + previousInputs.Add(input.ToString()); + if (previousInputs.Count > (maxPrevInputs + 1)) + previousInputs.RemoveAt(0); + + // In the pretrained SP with HPC, the TM will quickly learn cells for patterns + // In that case the starting sequence 4-5-6 might have the sam SDR as 1-2-3-4-5-6, + // Which will result in returning of 4-5-6 instead of 1-2-3-4-5-6. + // HtmClassifier allways return the first matching sequence. Because 4-5-6 will be as first + // memorized, it will match as the first one. 
+ if (previousInputs.Count < maxPrevInputs) + continue; + + string key = GetKey(previousInputs, input, sequenceKeyPair.name); + + List actCells; + + if (lyrOut.ActiveCells.Count == lyrOut.WinnerCells.Count) + { + actCells = lyrOut.ActiveCells; + } + else + { + actCells = lyrOut.WinnerCells; + } + + cls.Learn(key, actCells.ToArray()); + + Debug.WriteLine($"Col SDR: {Helpers.StringifyVector(lyrOut.ActivColumnIndicies)}"); + Debug.WriteLine($"Cell SDR: {Helpers.StringifyVector(actCells.Select(c => c.Index).ToArray())}"); + + // + // If the list of predicted values from the previous step contains the currently presenting value, + // we have a match. + if (lastPredictedValues.Contains(key)) + { + matches++; + Debug.WriteLine($"Match. Actual value: {key} - Predicted value: {lastPredictedValues.FirstOrDefault(key)}."); + } + else + Debug.WriteLine($"Missmatch! Actual value: {key} - Predicted values: {String.Join(',', lastPredictedValues)}"); + + if (lyrOut.PredictiveCells.Count > 0) + { + //var predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray()); + var predictedInputValues = cls.GetPredictedInputValues(lyrOut.PredictiveCells.ToArray(), 3); + + foreach (var item in predictedInputValues) + { + Debug.WriteLine($"Current Input: {input} \t| Predicted Input: {item.PredictedInput} - {item.Similarity}"); + } + + lastPredictedValues = predictedInputValues.Select(v => v.PredictedInput).ToList(); + } + else + { + Debug.WriteLine($"NO CELLS PREDICTED for next cycle."); + lastPredictedValues = new List(); + } + } + + // The first element (a single element) in the sequence cannot be predicted + double maxPossibleAccuraccy = (double)((double)sequenceKeyPair.data.Length - 1) / (double)sequenceKeyPair.data.Length * 100.0; + + double accuracy = (double)matches / (double)sequenceKeyPair.data.Length * 100.0; + + Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {sequenceKeyPair.data.Length}\t {accuracy}%"); + Console.WriteLine($"Cycle: 
{cycle}\tMatches={matches} of {sequenceKeyPair.data.Length}\t {accuracy}%"); + + if (accuracy >= maxPossibleAccuraccy) + { + maxMatchCnt++; + Debug.WriteLine($"100% accuracy reched {maxMatchCnt} times."); + + // + // Experiment is completed if we are 30 cycles long at the 100% accuracy. + if (maxMatchCnt >= 30) + { + sw.Stop(); + Debug.WriteLine($"Sequence learned. The algorithm is in the stable state after 30 repeats with with accuracy {accuracy} of maximum possible {maxMatchCnt}. Elapsed sequence {sequenceKeyPair.name} learning time: {sw.Elapsed}."); + break; + } + } + else if (maxMatchCnt > 0) + { + Debug.WriteLine($"At 100% accuracy after {maxMatchCnt} repeats we get a drop of accuracy with accuracy {accuracy}. This indicates instable state. Learning will be continued."); + maxMatchCnt = 0; + } + + // This resets the learned state, so the first element starts allways from the beginning. + tm.Reset(mem); + } + } + + Debug.WriteLine("------------ END ------------"); + + return new Predictor(layer1, mem, cls); + } + + + /// + /// Gets the number of all unique inputs. + /// + /// Alle sequences. + /// + private int GetNumberOfInputs(List sequences) + { + int num = 0; + + foreach (var inputs in sequences) + { + //num += inputs.Value.Distinct().Count(); + num += inputs.data.Length; + } + + return num; + } + + + /// + /// Constracts the unique key of the element of an sequece. This key is used as input for HtmClassifier. + /// It makes sure that alle elements that belong to the same sequence are prefixed with the sequence. + /// The prediction code can then extract the sequence prefix to the predicted element. 
+ /// + /// + /// + /// + /// + private static string GetKey(List prevInputs, double input, string sequence) + { + string key = String.Empty; + + for (int i = 0; i < prevInputs.Count; i++) + { + if (i > 0) + key += "-"; + + key += (prevInputs[i]); + } + //Console.WriteLine($"GetKey={sequence}_{key}"); + return $"{sequence}_{key}"; + } + } +} diff --git a/source/Samples/ApproveMultiSequenceLearning/Program.cs b/source/Samples/ApproveMultiSequenceLearning/Program.cs new file mode 100644 index 000000000..6c6b427df --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/Program.cs @@ -0,0 +1,152 @@ +using NeoCortexApi; +using NeoCortexApi.Encoders; +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using static MultiSequenceLearning.MultiSequenceLearning; + +namespace MultiSequenceLearning +{ + class Program + { + private const string DatasetFolder = "dataset"; + private const string ReportFolder = "report"; + private const string DatasetFileName = "dataset_03.json"; + private const string TestsetFileName = "test_01.json"; + + static void Main(string[] args) + { + //to read Input Dataset + string basePath = AppDomain.CurrentDomain.BaseDirectory; + List sequences = ReadDataset(Path.Combine(basePath, DatasetFolder, DatasetFileName)); + //to read test dataset + List sequencesTest = ReadDataset(Path.Combine(basePath, DatasetFolder, TestsetFileName)); + + List reports = RunMultiSequenceLearningExperiment(sequences, sequencesTest); + WriteReport(reports, basePath); + } + + private static List ReadDataset(string datasetPath) + { + try + { + Console.WriteLine($"Reading Dataset: {datasetPath}"); + return JsonConvert.DeserializeObject>(File.ReadAllText(datasetPath)); + } + catch (Exception ex) + { + Console.WriteLine($"Error reading dataset: {ex.Message}"); + return new List(); + } + } + + private static void WriteReport(List reports, string basePath) + { + string reportFolder = 
EnsureDirectory(Path.Combine(basePath, ReportFolder)); + string reportPath = Path.Combine(reportFolder, $"report_{DateTime.Now.Ticks}.txt"); + + using (StreamWriter sw = File.CreateText(reportPath)) + { + foreach (Report report in reports) + { + WriteReportContent(sw, report); + } + } + } + + private static string EnsureDirectory(string path) + { + if (!Directory.Exists(path)) + Directory.CreateDirectory(path); + return path; + } + + private static void WriteReportContent(StreamWriter sw, Report report) + { + sw.WriteLine("------------------------------"); + sw.WriteLine($"Using test sequence: {report.SequenceName} -> {string.Join("-", report.SequenceData)}"); + foreach (string log in report.PredictionLog) + { + sw.WriteLine($"\t{log}"); + } + sw.WriteLine($"\tAccuracy: {report.Accuracy}%"); + sw.WriteLine("------------------------------"); + } + + private static List RunMultiSequenceLearningExperiment(List sequences, List sequencesTest) + { + var reports = new List(); + var experiment = new MultiSequenceLearning(); + var predictor = experiment.Run(sequences); + + foreach (Sequence item in sequencesTest) + { + var report = new Report + { + SequenceName = item.name, + SequenceData = item.data + }; + + double accuracy = PredictNextElement(predictor, item.data, report); + report.Accuracy = accuracy; + reports.Add(report); + + Console.WriteLine($"Accuracy for {item.name} sequence: {accuracy}%"); + } + + return reports; + } + + private static double PredictNextElement(Predictor predictor, int[] list, Report report) + { + int matchCount = 0, predictions = 0; + List logs = new List(); + + predictor.Reset(); + + for (int i = 0; i < list.Length - 1; i++) + { + int current = list[i]; + int next = list[i + 1]; + + logs.Add(PredictElement(predictor, current, next, ref matchCount)); + predictions++; + } + + report.PredictionLog = logs; + return CalculateAccuracy(matchCount, predictions); + } + + private static string PredictElement(Predictor predictor, int current, int next, 
ref int matchCount) + { + Console.WriteLine($"Input: {current}"); + var predictions = predictor.Predict(current); + if (predictions.Any()) + { + var highestPrediction = predictions.OrderByDescending(p => p.Similarity).First(); + string predictedSequence = highestPrediction.PredictedInput.Split('-').First(); + int predictedNext = int.Parse(highestPrediction.PredictedInput.Split('-').Last()); + + Console.WriteLine($"Predicted Sequence: {predictedSequence} - Predicted next element: {predictedNext}"); + if (predictedNext == next) + matchCount++; + + return $"Input: {current}, Predicted Sequence: {predictedSequence}, Predicted next element: {predictedNext}"; + } + else + { + Console.WriteLine("Nothing predicted"); + return $"Input: {current}, Nothing predicted"; + } + } + + private static double CalculateAccuracy(int matchCount, int predictions) + { + return (double)matchCount / predictions * 100; + } + } + + +} \ No newline at end of file diff --git a/source/Samples/ApproveMultiSequenceLearning/Report.cs b/source/Samples/ApproveMultiSequenceLearning/Report.cs new file mode 100644 index 000000000..0bc901651 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/Report.cs @@ -0,0 +1,20 @@ +using Org.BouncyCastle.Asn1.Mozilla; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace MultiSequenceLearning +{ + public class Report + { + public Report() { } + + public string SequenceName { get; set; } + public int[] SequenceData { get; set; } + public List PredictionLog { get; set; } + public double Accuracy { get; set; } + + } +} diff --git a/source/Samples/ApproveMultiSequenceLearning/Sequence.cs b/source/Samples/ApproveMultiSequenceLearning/Sequence.cs new file mode 100644 index 000000000..d8fcb7732 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/Sequence.cs @@ -0,0 +1,14 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using 
System.Threading.Tasks; + +namespace MultiSequenceLearning +{ + public class Sequence + { + public String name { get; set; } + public int[] data { get; set; } + } +} diff --git a/source/Samples/ApproveMultiSequenceLearning/report/report_638172569568213902.txt b/source/Samples/ApproveMultiSequenceLearning/report/report_638172569568213902.txt new file mode 100644 index 000000000..ee1234df5 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/report/report_638172569568213902.txt @@ -0,0 +1,33 @@ +------------------------------ +Sequence: S1 -> 0-2-5-6-7-8-10-11-13 +Sequence: S2 -> 1-2-3-4-6-11-12-13-14 +Sequence: S3 -> 1-2-3-4-7-8-10-12-14 +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S2, Predicted next element: 4 + Input: 4, Predicted Sequence: S3, Predicted next element: 7 + Input: 7, Predicted Sequence: S3, Predicted next element: 8 + Input: 8, Predicted Sequence: S3, Predicted next element: 10 + Accuracy: 100% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S2, Predicted next element: 4 + Input: 4, Predicted Sequence: S3, Predicted next element: 7 + Input: 7, Predicted Sequence: S3, Predicted next element: 8 + Input: 8, Predicted Sequence: S3, Predicted next element: 10 + Accuracy: 100% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S2, Predicted next element: 4 + Input: 4, Predicted Sequence: S3, Predicted next element: 7 + Input: 7, Predicted Sequence: S3, Predicted next element: 8 + Input: 8, Predicted Sequence: S3, Predicted next element: 10 + Accuracy: 100% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S2, Predicted next element: 4 + Input: 4, Predicted Sequence: S3, Predicted next element: 7 + Input: 7, Predicted Sequence: S3, Predicted next element: 8 + Input: 8, Predicted Sequence: S3, Predicted next element: 10 + Accuracy: 
100% +------------------------------ diff --git a/source/Samples/ApproveMultiSequenceLearning/report/report_638172649322130486.txt b/source/Samples/ApproveMultiSequenceLearning/report/report_638172649322130486.txt new file mode 100644 index 000000000..89d715b70 --- /dev/null +++ b/source/Samples/ApproveMultiSequenceLearning/report/report_638172649322130486.txt @@ -0,0 +1,60 @@ +------------------------------ +Sequence: S1 -> 0-1-4-5-6-7-8-13-14 +Sequence: S2 -> 0-1-2-3-4-5-7-8-11 +Sequence: S3 -> 0-1-2-3-4-5-6-9-13 +Sequence: S4 -> 1-3-5-8-9-10-11-12-14 +Sequence: S5 -> 0-3-4-6-7-10-12-13-14 +Sequence: S6 -> 2-3-4-5-7-8-9-10-14 +Sequence: S7 -> 0-2-3-4-5-6-8-10-13 +Sequence: S8 -> 1-2-4-5-6-10-11-12-13 +Sequence: S9 -> 0-1-2-3-4-8-10-12-13 +Sequence: S10 -> 0-1-2-4-5-6-7-9-11 +Sequence: S11 -> 2-3-4-9-10-11-12-13-14 +Sequence: S12 -> 0-2-3-4-5-6-7-12-13 +Sequence: S13 -> 1-2-4-6-8-9-11-12-14 +Sequence: S14 -> 2-4-6-8-9-10-11-12-13 +Sequence: S15 -> 0-1-3-6-8-9-10-13-14 +Sequence: S16 -> 1-2-3-8-9-10-11-12-14 +Sequence: S17 -> 0-1-3-4-6-7-9-10-13 +Sequence: S18 -> 1-3-5-6-8-9-10-12-13 +Sequence: S19 -> 1-2-4-5-6-10-11-12-14 +Sequence: S20 -> 0-2-4-5-6-9-10-13-14 +Sequence: S21 -> 0-4-5-6-7-8-11-13-14 +Sequence: S22 -> 0-3-5-6-7-8-9-10-13 +Sequence: S23 -> 0-1-2-3-5-7-9-10-13 +Sequence: S24 -> 0-3-4-6-8-9-10-13-14 +Sequence: S25 -> 0-1-2-3-5-8-10-12-13 +Sequence: S26 -> 0-3-5-6-8-9-10-12-13 +Sequence: S27 -> 1-3-4-5-6-8-11-12-14 +Sequence: S28 -> 0-3-5-6-7-9-11-12-13 +Sequence: S29 -> 0-2-3-4-5-6-11-12-14 +Sequence: S30 -> 0-1-2-3-4-6-8-9-12 +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S24, Predicted next element: 4 + Input: 4, Predicted Sequence: S30, Predicted next element: 6 + Input: 7, Predicted Sequence: S17, Predicted next element: 9 + Input: 8, Predicted Sequence: S30, Predicted next element: 9 + Accuracy: 25% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted 
Sequence: S24, Predicted next element: 4 + Input: 4, Predicted Sequence: S30, Predicted next element: 6 + Input: 7, Predicted Sequence: S17, Predicted next element: 9 + Input: 8, Predicted Sequence: S30, Predicted next element: 9 + Accuracy: 25% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S24, Predicted next element: 4 + Input: 4, Predicted Sequence: S30, Predicted next element: 6 + Input: 7, Predicted Sequence: S17, Predicted next element: 9 + Input: 8, Predicted Sequence: S30, Predicted next element: 9 + Accuracy: 25% +------------------------------ +Using test sequence: T4 -> 3-4-7-8-10 + Input: 3, Predicted Sequence: S24, Predicted next element: 4 + Input: 4, Predicted Sequence: S30, Predicted next element: 6 + Input: 7, Predicted Sequence: S17, Predicted next element: 9 + Input: 8, Predicted Sequence: S30, Predicted next element: 9 + Accuracy: 25% +------------------------------ diff --git a/source/Samples/NeoCortexApiAnomaly/NeoCortexApiAnomaly.csproj b/source/Samples/NeoCortexApiAnomaly/NeoCortexApiAnomaly.csproj index b0b6f2666..a725004a2 100644 --- a/source/Samples/NeoCortexApiAnomaly/NeoCortexApiAnomaly.csproj +++ b/source/Samples/NeoCortexApiAnomaly/NeoCortexApiAnomaly.csproj @@ -1,16 +1,13 @@  - Exe - net7.0 + net8.0 enable enable - - - + \ No newline at end of file diff --git a/source/Samples/NeoCortexApiPersistenceSample/NeoCortexApiPersistenceSample.csproj b/source/Samples/NeoCortexApiPersistenceSample/NeoCortexApiPersistenceSample.csproj index 404bb2508..8a8efcc0c 100644 --- a/source/Samples/NeoCortexApiPersistenceSample/NeoCortexApiPersistenceSample.csproj +++ b/source/Samples/NeoCortexApiPersistenceSample/NeoCortexApiPersistenceSample.csproj @@ -1,16 +1,13 @@ - Exe - net7.0 + net8.0 enable enable - - - + \ No newline at end of file diff --git a/source/Samples/NeoCortexApiSample/NeoCortexApiSample.csproj b/source/Samples/NeoCortexApiSample/NeoCortexApiSample.csproj index 
49df32396..df5b34f8b 100644 --- a/source/Samples/NeoCortexApiSample/NeoCortexApiSample.csproj +++ b/source/Samples/NeoCortexApiSample/NeoCortexApiSample.csproj @@ -1,30 +1,25 @@  - Exe - net7.0 - + net8.0 + + - full true - full true - - - - + \ No newline at end of file diff --git a/source/Samples/NeoCortexApiSample/SpatialPatternLearning.cs b/source/Samples/NeoCortexApiSample/SpatialPatternLearning.cs index d7e598136..b68d2af66 100644 --- a/source/Samples/NeoCortexApiSample/SpatialPatternLearning.cs +++ b/source/Samples/NeoCortexApiSample/SpatialPatternLearning.cs @@ -81,7 +81,7 @@ public void Run() var sp = RunExperiment(cfg, encoder, inputValues); - RunRustructuringExperiment(sp, encoder, inputValues); + //RunRustructuringExperiment(sp, encoder, inputValues); } diff --git a/source/SequenceLearningExperiment/SequenceLearningExperiment.csproj b/source/SequenceLearningExperiment/SequenceLearningExperiment.csproj index c5fe0bb1e..84bccdcff 100644 --- a/source/SequenceLearningExperiment/SequenceLearningExperiment.csproj +++ b/source/SequenceLearningExperiment/SequenceLearningExperiment.csproj @@ -1,24 +1,19 @@ - Exe - net5.0 + net7.0 - full true - full true - - - + \ No newline at end of file diff --git a/source/UnitTestsProject/EncoderTests/ScalarEncoderTests.cs b/source/UnitTestsProject/EncoderTests/ScalarEncoderTests.cs index e0eae9bf7..381303587 100644 --- a/source/UnitTestsProject/EncoderTests/ScalarEncoderTests.cs +++ b/source/UnitTestsProject/EncoderTests/ScalarEncoderTests.cs @@ -3,14 +3,28 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using NeoCortex; using NeoCortexApi.Encoders; +using NeoCortexApi.Entities; using NeoCortexApi.Network; using NeoCortexApi.Utility; +using NeoCortexEntities.NeuroVisualizer; +using Newtonsoft.Json.Linq; +using Org.BouncyCastle.Ocsp; +using SkiaSharp; using System; using System.Collections.Generic; using System.Diagnostics; using System.Drawing; using System.IO; using System.Linq; +using static SkiaSharp.SKPath; +using 
System.Xml.Linq; +using static System.Net.Mime.MediaTypeNames; +using Org.BouncyCastle.Crypto; +using Microsoft.VisualStudio.TestPlatform.Utilities; +using System.Text; +using static SkiaSharp.SKImageFilter; +using LearningFoundation; +using Org.BouncyCastle.Utilities; namespace UnitTestsProject.EncoderTests { @@ -120,15 +134,14 @@ public void SeasonEncoderTest(int input, int[] expectedResult) { "ClipInput", true}, }); - //for (int i = 1; i < 367; i++) - //{ + var result = encoder.Encode(input); Debug.WriteLine(input); Debug.WriteLine(NeoCortexApi.Helpers.StringifyVector(result)); Debug.WriteLine(NeoCortexApi.Helpers.StringifyVector(expectedResult)); - //} + Assert.IsTrue(expectedResult.SequenceEqual(result)); } @@ -179,18 +192,20 @@ public void TimeTickEncodingTest(double input, int[] expectedResult) } /// - /// Encodes a set of numbers and produces bitmap output. + /// This test method encodes a set of numbers and produces bitmap output for scalar encoding with non-periodic data. /// [TestMethod] [TestCategory("Experiment")] - public void ScalarEncodingExperiment() + public void ScalarEncodingGetBucketIndexNonPeriodic() { - string outFolder = nameof(ScalarEncodingExperiment); + // Create a directory to save the bitmap output. + string outFolder = nameof(ScalarEncodingGetBucketIndexNonPeriodic); Directory.CreateDirectory(outFolder); DateTime now = DateTime.Now; + // Create a new ScalarEncoder with the given configuration. ScalarEncoder encoder = new ScalarEncoder(new Dictionary() { { "W", 21}, @@ -203,19 +218,304 @@ public void ScalarEncodingExperiment() { "ClipInput", false}, }); + // Iterate through a range of numbers and encode them using the ScalarEncoder. for (decimal i = 0.0M; i < (long)encoder.MaxVal; i += 0.1M) { + // Encode the number and get the corresponding bucket index. var result = encoder.Encode(i); int? bucketIndex = encoder.GetBucketIndex(i); - + + // Convert the encoded result into a 2D array and transpose it. 
int[,] twoDimenArray = ArrayUtils.Make2DArray(result, (int)Math.Sqrt(result.Length), (int)Math.Sqrt(result.Length)); var twoDimArray = ArrayUtils.Transpose(twoDimenArray); + // Draw a bitmap of the encoded result with the corresponding bucket index and save it to the output folder. NeoCortexUtils.DrawBitmap(twoDimArray, 1024, 1024, $"{outFolder}\\{i}.png", Color.Gray, Color.Green, text: $"v:{i} /b:{bucketIndex}"); + + // Print the value of i and its corresponding bucket index for debugging purposes. + Console.WriteLine($"Encoded {i} into bucket {bucketIndex}"); } } + /// + /// This method tests the periodic scalar encoding by iterating over a range of decimal values and encoding each value to a bitmap. + /// For each encoded value, it also gets the corresponding bucket index and adds it as text to the bitmap. + /// The generated bitmaps are saved to a folder named after the test method. + /// + [TestMethod] + [TestCategory("Experiment")] + public void ScalarEncodingGetBucketIndexPeriodic() + { + string outFolder = nameof(ScalarEncodingGetBucketIndexPeriodic); + + Directory.CreateDirectory(outFolder); + + DateTime now = DateTime.Now; + + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", true}, + { "Name", "scalar_periodic"}, + { "ClipInput", false}, + }); + + // Loop through a range of decimal values and encode each value to a bitmap. + for (decimal i = 0.0M; i < (long)encoder.MaxVal; i += 0.1M) + { + // Encode the value using ScalarEncoder. + var result = encoder.Encode(i); + + // Get the corresponding bucket index for the value. + int? bucketIndex = encoder.GetBucketIndex(i); + + // Convert the result into a 2D array and transpose it. 
+ int[,] twoDimenArray = ArrayUtils.Make2DArray(result, (int)Math.Sqrt(result.Length), (int)Math.Sqrt(result.Length)); + var twoDimArray = ArrayUtils.Transpose(twoDimenArray); + + // Save the generated bitmap to the output folder with the corresponding text. + NeoCortexUtils.DrawBitmap(twoDimArray, 1024, 1024, $"{outFolder}\\{i}.png", Color.Gray, Color.Green, text: $"v:{i} /b:{bucketIndex}"); + } + } + /// + /// Tests the ScalarEncoder's ability to encode into a pre-allocated boolean array. + /// Encodes input values ranging from 0 to 100 with a step size of 0.1 and checks if + /// the output encoded array is correct. + /// + [TestMethod] + public void ScalarEncoder_EncodeIntoArray_PrealloactedBoolArray_EncodesCorrectly1() + { + // Arrange + double minValue = 0; + double maxValue = 100; + int numBits = 1024; + double period = maxValue - minValue; + + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar"}, + { "ClipInput", false}, + }); + + // Act & Assert + for (double input = 0; input <= 100; input += 0.1) + { + bool[] encodedArray = new bool[numBits]; + + // Encode the input value into the pre-allocated array + encoder.EncodeIntoArray(input, encodedArray); + + // Print the input value and its corresponding encoded array + Console.WriteLine("Input: {0}, Encoded Array: {1}", input, string.Join("", encodedArray.Select(b => b ? "1" : "0"))); + } + } + + + /// + /// This unit test tests the GenerateRangeDescription method of the ScalarEncoder class with different input ranges. + /// The method generates the string representation of the input ranges and verifies the output with Assert.AreEqual. 
+ /// + [TestMethod] + public void TestGenerateRangeDescription() + { + // Arrange + var encoder = new ScalarEncoder(10, 0, 100, true); + + var ranges1 = new List>() + { + Tuple.Create(1.0, 3.0), + Tuple.Create(7.0, 10.0) + }; + string expectedRange1 = "1.00-3.00, 7.00-10.00"; + + var ranges2 = new List>() + { + Tuple.Create(2.5, 2.5) + }; + string expectedRange2 = "2.50"; + + var ranges3 = new List>() + { + Tuple.Create(1.0, 1.0), + Tuple.Create(5.0, 6.0) + }; + string expectedRange3 = "1.00, 5.00-6.00"; + + // Act + string actualRange1 = encoder.GenerateRangeDescription(ranges1); + string actualRange2 = encoder.GenerateRangeDescription(ranges2); + string actualRange3 = encoder.GenerateRangeDescription(ranges3); + + // Assert + Console.WriteLine($"Actual range 1: {actualRange1}"); + Console.WriteLine($"Expected range 1: {expectedRange1}"); + Assert.AreEqual(expectedRange1, actualRange1); + + Console.WriteLine($"Actual range 2: {actualRange2}"); + Console.WriteLine($"Expected range 2: {expectedRange2}"); + Assert.AreEqual(expectedRange2, actualRange2); + + Console.WriteLine($"Actual range 3: {actualRange3}"); + Console.WriteLine($"Expected range 3: {expectedRange3}"); + Assert.AreEqual(expectedRange3, actualRange3); + + } + + /// + /// This test case checks the closeness score calculation of a scalar encoder with periodic input. 
+ /// + [TestMethod] + public void ClosenessScorestest1() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", true}, + { "Name", "scalar_periodic"}, + { "ClipInput", false}, + }); + + double[] expValues = new double[] { 50 }; + double[] actValues = new double[] { 51 }; + bool fractional = true; + double expectedCloseness = 0.99; + + // Act + double[] actualCloseness = encoder.ClosenessScores(expValues, actValues, fractional); + + // Assert + Console.WriteLine("Expected closeness: " + expectedCloseness); + Console.WriteLine("Actual closeness: " + actualCloseness[0]); + Assert.AreEqual(expectedCloseness, actualCloseness[0], 0.01); + } + + + /// + /// This test case checks the closeness score calculation of the ScalarEncoder with non-periodic encoding. + ///The test input consists of an expected value, an actual value, and a maximum allowed difference. + ///The expected closeness score, actual closeness score, and the maximum difference are printed to the console before assertion. 
+ /// + [TestMethod] + public void ClosenessScorestest2() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + }); + + double[] expValues = new double[] { 50 }; + double[] actValues = new double[] { 50.3 }; + bool fractional = true; + double expectedCloseness = 0.99; + double maxDifference = 0.01; + + // Act + double actualCloseness = encoder.ClosenessScores(expValues, actValues, fractional)[0]; + + // Assert + Console.WriteLine($"Expected closeness: {expectedCloseness}"); + Console.WriteLine($"Actual closeness: {actualCloseness}"); + Console.WriteLine($"Max difference: {maxDifference}"); + + Assert.AreEqual(expectedCloseness, actualCloseness, maxDifference); + } + + /// + /// This test case checks the closeness score of two arrays of scalar values using the ScalarEncoder class. + ///The test initializes the encoder with specific parameters and provides expected and actual values for the closeness score. + ///Print statements are included to display the expected and actual closeness scores, and an assertion is made to ensure that + ///they match within a tolerance of 0.01. 
+ /// + [TestMethod] + public void ClosenessScorestest3() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21 }, + { "N", 1024 }, + { "Radius", -1.0 }, + { "MinVal", 0.0 }, + { "MaxVal", 100.0 }, + { "Periodic", true }, + { "Name", "scalar_periodic" }, + { "ClipInput", false }, + }); + + double[] expValues = new double[] { 50 }; + double[] actValues = new double[] { 70 }; + bool fractional = true; + double expectedCloseness = 0.8; + double actualCloseness = encoder.ClosenessScores(expValues, actValues, fractional)[0]; + + // Print statements + Console.WriteLine("Expected Closeness: " + expectedCloseness); + Console.WriteLine("Actual Closeness: " + actualCloseness); + + Assert.AreEqual(expectedCloseness, actualCloseness, 0.01, $"Expected closeness: {expectedCloseness}, Actual closeness: {actualCloseness}"); + } + + + /// + /// This test case tests the ClosenessScores() method of the ScalarEncoder class. + ///The test case checks the closeness score of two values, one expected and one actual, using a non-periodic scalar encoder. + ///The expected and actual closeness scores are compared with a tolerance of 0.01 using Assert.AreEqual(). 
+ /// + [TestMethod] + public void ClosenessScoresTest4() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + }); + + double[] expValues = new double[] { 25 }; + double[] actValues = new double[] { 75 }; + bool fractional = true; + double expectedCloseness = 0.5; + + // Act + double actualCloseness = encoder.ClosenessScores(expValues, actValues, fractional)[0]; + Console.WriteLine("Expected closeness: " + expectedCloseness); + Console.WriteLine("Actual closeness: " + actualCloseness); + + // Assert + Assert.AreEqual(expectedCloseness, actualCloseness, 0.01); + } + + /// /// The DecodeTest /// @@ -381,7 +681,275 @@ private static Dictionary getDefaultSettings() return encoderSettings; } - } -} + + + + /// + /// This is a test case for decoding the output of ScalarEncoder into input values. + ///Eight test cases are executed using different output values and the expected input values are computed using the decode function. + ///The actual input values and the expected input values are printed for each test case to verify the correctness of the decoding algorithm. 
+ /// + [TestMethod] + [TestCategory("Encoding")] + public void ScalarEncodingDecode() + { + int[] output1 = { 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0 }; + int[] output2 = { 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1 }; + int[] output3 = { 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0 }; + int[] output4 = { 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1 }; + int[] output5 = { 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0 }; + int[] output6 = { 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1 }; + int[] output7 = { 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0 }; + int[] output8 = { 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1 }; + int minVal = 0; + int maxVal = 100; + int n = 14; + double w = 3.0; + bool periodic = true; + + int[][] testCases = new int[][] { output1, output2, output3, output4, output5, output6, output7, output8 }; + + foreach (int[] output in testCases) + { + int[] input = ScalarEncoder.Decode(output, minVal, maxVal, n, w, periodic); + + Console.WriteLine("Output: " + string.Join(",", output)); + Console.WriteLine("Input: " + string.Join(",", input)); + Console.WriteLine("----------------------------------------"); + } + + } + + + /// + /// This test case tests the behavior of the GetBucketValues() method of the ScalarEncoder class. + ///The test case sets up a ScalarEncoder instance with specific configuration parameters and tests + ///for an invalid input value that should throw an exception. + ///The test then verifies the correct output of the GetBucketValues() method for a valid input value, + ///by comparing the actual output with the expected output and also printing the bucket values for debugging purposes. 
+ /// + [TestMethod] + public void TestGetBucketValues() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + { "NumBuckets", 100 }, + }); + + // Act and assert + Assert.ThrowsException(() => encoder.GetBucketValues(-10.0)); + + double[] bucketValues = null; + try + { + bucketValues = encoder.GetBucketValues(47.5); + Console.WriteLine($"Bucket values - Actual: {string.Join(", ", bucketValues)}, Expected: {string.Join(", ", new double[] { 47, 48 })}"); + Assert.AreEqual(new double[] { 47, 48 }, bucketValues); + } + catch (Exception) + { + + } + } + + + /// + /// The test checks the bucket information of the encoder with different input values. + /// The encoder parameters include the minimum and maximum values, the number of buckets, + /// the radius, and periodicity. The test asserts the expected bucket information for input + /// values close to the bucket boundaries, inside and outside the range, and at the middle of + /// the range. 
+ /// + [TestMethod] + public void TestGetBucketInfoNonPeriodic() + { + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 100}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + { "NumBuckets", 100 }, + }); + + // Test values near bucket boundaries + int[] expected = new int[] { 49, 50, 49, 50 }; + int[] bucketInfo = encoder.GetBucketInfo(49.0); + Console.WriteLine($"Expected Bucket info for 49.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 49.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 50, 50, 50, 51 }; + bucketInfo = encoder.GetBucketInfo(50.0); + Console.WriteLine($"Expected Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 51, 52, 51, 52 }; + bucketInfo = encoder.GetBucketInfo(51.0); + Console.WriteLine($"Expected Bucket info for 51.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 51.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + // Test values outside of range + expected = new int[] { 0, 0, 0, 1 }; + bucketInfo = encoder.GetBucketInfo(-10.0); + Console.WriteLine($"Expected Bucket info for -10.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for -10.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): 
{string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 100, 100, 100, 101 }; + bucketInfo = encoder.GetBucketInfo(110.0); + Console.WriteLine($"Expected Bucket info for 110.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 110.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + // Test value in middle of range + expected = new int[] { 50, 50, 50, 51 }; + bucketInfo = encoder.GetBucketInfo(50.0); + Console.WriteLine($"Expected Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + } + + /// + /// It creates an instance of the ScalarEncoder with specified parameters and tests the method with various + /// input values. It tests values near bucket boundaries, values outside of range, and a value in the middle of the + /// range. For each input value, it compares the expected bucket info with the actual bucket info obtained from the method, + /// and outputs the result to the console. 
+ /// + [TestMethod] + public void TestGetBucketInfoPeriodic() + { + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 100}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", true}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + { "NumBuckets", 100 }, + }); + + // Test values near bucket boundaries + int[] expected = new int[] { 49, 49, 49, 50 }; + int[] bucketInfo = encoder.GetBucketInfo(49.0); + Console.WriteLine($"Expected Bucket info for 49.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 49.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 50, 50, 50, 51 }; + bucketInfo = encoder.GetBucketInfo(50.0); + Console.WriteLine($"Expected Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 51, 51, 51, 52 }; + bucketInfo = encoder.GetBucketInfo(51.0); + Console.WriteLine($"Expected Bucket info for 51.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 51.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + // Test values outside of range + expected = new int[] { 0, 0, 0, 1 }; + bucketInfo = encoder.GetBucketInfo(-10.0); + Console.WriteLine($"Expected Bucket info for -10.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for -10.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): 
{string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + expected = new int[] { 0, 100, 100, 101 }; + bucketInfo = encoder.GetBucketInfo(110.0); + Console.WriteLine($"Expected Bucket info for 110.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 110.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + + // Test value in middle of range + expected = new int[] { 50, 50, 50, 51 }; + bucketInfo = encoder.GetBucketInfo(50.0); + Console.WriteLine($"Expected Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", expected)}"); + Console.WriteLine($"Actual Bucket info for 50.0 (bucketIndex, bucketCenter, bucketStart, bucketEnd): {string.Join(",", bucketInfo)}"); + CollectionAssert.AreEqual(expected, bucketInfo); + } + + + /// + /// The method tests the _getTopDownMapping function of the ScalarEncoder class with periodic parameter set to true. + ///It sets input, periodic, and numBuckets values and creates a ScalarEncoder object with some default parameters. + ///Then it compares the expected mapping array with the actual mapping array returned from the _getTopDownMapping + ///function using CollectionAssert.AreEqual method. 
+ /// + [TestMethod] + public void Test_GetTopDownMapping_Periodic() + { + double input = 0.25; + bool periodic = true; + int numBuckets = 4; + + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 100}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 1.0 }, + { "Periodic", periodic}, + { "Name", "scalar_nonperiodic"}, + { "ClipInput", false}, + { "NumBuckets", numBuckets }, + }); + + int[] expected = new int[] { 0, 1, 0, 0 }; + int[] mapping = encoder._getTopDownMapping(input, periodic, numBuckets); + Console.WriteLine($"Expected GetTopDownMapping Array: {string.Join(",", expected)}"); + Console.WriteLine($"Actual GetTopDownMapping Array: {string.Join(",", mapping)}"); + + CollectionAssert.AreEqual(expected, mapping); + } + + + [TestMethod] + public void ScalarEncoder_Encode_EncodesCorrectly() + { + // Arrange + ScalarEncoder encoder = new ScalarEncoder(new Dictionary() + { + { "W", 21}, + { "N", 1024}, + { "Radius", -1.0}, + { "MinVal", 0.0}, + { "MaxVal", 100.0 }, + { "Periodic", false}, + { "Name", "scalar"}, + { "ClipInput", false}, + }); + + double input = 75.3; + int[] expectedArray = encoder.Encode(input); + + // Act + int[] actualArray = encoder.Encode(input); + + // Assert + CollectionAssert.AreEqual(expectedArray, actualArray); + } + + } +} \ No newline at end of file diff --git a/source/UnitTestsProject/ImageEncoderTest/ImageEncoderTest.csproj b/source/UnitTestsProject/ImageEncoderTest/ImageEncoderTest.csproj index 610684b44..972393e97 100644 --- a/source/UnitTestsProject/ImageEncoderTest/ImageEncoderTest.csproj +++ b/source/UnitTestsProject/ImageEncoderTest/ImageEncoderTest.csproj @@ -1,23 +1,18 @@  - - net6.0 + net7.0 enable - false - - - PreserveNewest @@ -32,5 +27,4 @@ PreserveNewest - - + \ No newline at end of file diff --git a/source/UnitTestsProject/UnitTestsProject.csproj b/source/UnitTestsProject/UnitTestsProject.csproj index 34cdff53d..be600208a 100644 --- 
a/source/UnitTestsProject/UnitTestsProject.csproj +++ b/source/UnitTestsProject/UnitTestsProject.csproj @@ -1,22 +1,17 @@  - - - net6.0 - - false - - - - TRACE;USE_AKKA - full - true - - - - full - true - - + + net8.0 + false + + + TRACE;USE_AKKA + full + true + + + full + true + @@ -28,3793 +23,3781 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - 
PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + 
PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + + + + + + + + + + + \ No newline at end of file