// Program.cs
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.Random;
using OxyPlot;
using OxyPlot.Axes;
using OxyPlot.Series;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace IVMCTrim
{
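// Entry point: runs the IVMC-Trim model under several environmental-variation regimes,
// writing each run's plots and trajectory dumps into its own output directory.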
class Program
{
static void Main(string[] args)
{
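// Each Run: output directory name, parameter set (the numeric suffix is Z, the dual-peaked environment frequency), RNG seed.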
Run("Z000", Standard(0.0), 42);
Run("FixedHigh", FixedHigh, 42);
Run("Z050", Standard(0.5), 1142);
Run("Z075", Standard(0.75), 1642);
Run("Z095", Standard(0.95), 2042);
Run("Z000PerfectG", PerfectG(0.0), 42);
}
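// Runs one condition: simulates TotalEpochs epochs, dumping the B matrix every 1000 epochs, then exports trajectory data and plots.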
public static void Run(string outDir, Parameters parameters, int seed)
{
Console.WriteLine($"Running into {outDir}");
System.IO.Directory.CreateDirectory(outDir);
string path(string f) => System.IO.Path.Combine(outDir, f);
var ctx = new ExecutionContext(new MersenneTwister(seed, false));
int N = 16;
var individual = Individual.ZeroGenome(N);
var target = new IvmcTarget(4, 4);
var recorders = new Recorders(target.ModuleCount, target.ModuleSize);
// record the initial (epoch 0) state before any evolution; there is no target or judgement yet, so placeholders are passed
recorders.Sample(null, individual, default(Judgement), 0);
// run the simulation
for (int epoch = 1; epoch <= parameters.TotalEpochs; epoch++)
{
var judgement = Model.RunEpoch(epoch, ctx, parameters, target, ref individual);
recorders.Sample(target, individual, judgement, epoch);
if (epoch % 1000 == 0)
{
// reporting
Console.WriteLine($"{epoch}\t{judgement.Fitness}");
var matPlot = PlotHelpers.IntegralCartesianAxes($"Epoch {epoch}", "row", "col");
matPlot.Axes.Add(PlotHelpers.Gray());
matPlot.Series.Add(PlotHelpers.PlotMatrix(individual.B));
matPlot.ExportPdf(path($"epoch{epoch}.pdf"), 0.5, true);
}
}
// data export and mindless plotting
var rcsData = recorders.GetRcsDataFlat();
TrajectoryHelpers.SaveTrajectories(path("rcs_t.dat"), rcsData, recorders.Grns.SamplePeriod);
var rcsPlot = PlotHelpers.LinearAxes("Regulation Coefficients", "Epoch", "Connection Strength");
rcsPlot.Series.AddRange(PlotHelpers.PlotTrajectoryLines2D(recorders.GetRcsData(), recorders.Grns.SamplePeriod, OxyColors.DarkCyan, OxyColors.LightCyan, OxyColors.DarkGreen, OxyColors.LightGreen));
rcsPlot.ExportPdf(path("rcs_t.pdf"), 0.5, false);
var huskyPlot = PlotHelpers.LinearAxes("Evolution of Hierarchy", "Epoch", "Degree of Hierarchy");
huskyPlot.Series.AddRange(
PlotHelpers.PlotTrajectoryLines1D(
recorders.Grns.Samples.Select(grn => Analysis.ComputeBlockModuleHuskynesses(grn, target.ModuleSize).ToList()).ToArray().Transpose(),
recorders.Grns.SamplePeriod,
OxyColors.DarkSlateGray, OxyColors.SlateGray));
huskyPlot.ExportPdf(path("hierarchy_t.pdf"), 0.5, false);
var moduleIndependencePlot = PlotHelpers.LinearAxes("Evolution of Module Independence", "Epoch", "Module Independence");
moduleIndependencePlot.Series.AddRange(
PlotHelpers.PlotTrajectoryLines1D(
recorders.Grns.Samples.Select(grn => Analysis.ComputeBlockModuleIndependence(grn, target.ModuleSize).ToList()).ToArray().Transpose(),
recorders.Grns.SamplePeriod,
OxyColors.Teal, OxyColors.DarkTurquoise));
moduleIndependencePlot.ExportPdf(path("moduleIndependence_t.pdf"), 0.5, false);
var fitnessData = recorders.GetFitnessData();
TrajectoryHelpers.SaveTrajectories(path("fitness_t.dat"), fitnessData, recorders.Judgements.SamplePeriod);
var fitnessPlot = PlotHelpers.LinearAxes("Fitness", "Epoch", "Fitness");
fitnessPlot.Series.AddRange(PlotHelpers.PlotTrajectoryScatters1D(fitnessData.Take(1).ToArray().DownsampleTrajectories(100), recorders.Judgements.SamplePeriod * 100, OxyColors.Red, OxyColors.DarkOrange));
fitnessPlot.ExportPdf(path("fitness_t.pdf"), 0.5, false);
var meanFitnessPlot = PlotHelpers.LinearAxes("Mean Fitness", "Epoch", "Mean Fitness");
meanFitnessPlot.Series.AddRange(PlotHelpers.PlotTrajectoryScatters1D(fitnessData.Take(1).ToArray().MeanDownsampleTrajectories(100), recorders.Judgements.SamplePeriod * 100, OxyColors.Red, OxyColors.DarkOrange));
meanFitnessPlot.ExportPdf(path("meanfitness_t.pdf"), 0.5, false);
var solveData = recorders.GetSolveData().Map2D(x => (double)x);
TrajectoryHelpers.SaveTrajectories(path("ivmcsolves_t.dat"), solveData, recorders.Solves.SamplePeriod);
var solvePlot = PlotHelpers.LinearAxes("Solves", "Epoch", "Solve Frequency");
solvePlot.Series.AddRange(PlotHelpers.PlotTrajectoryScatters1D(solveData.Map2D(x => x / recorders.Solves.SamplePeriod).TakeLast(1).ToArray(), recorders.Solves.SamplePeriod, OxyColors.IndianRed, OxyColors.DarkRed));
solvePlot.ExportPdf(path("solves_t.pdf"), 0.5, false);
var switchData = recorders.GetSwitchData().Map2D(x => (double)x);
TrajectoryHelpers.SaveTrajectories(path("ivmcswitches_t.dat"), switchData, recorders.Switches.SamplePeriod);
var switchPlot = PlotHelpers.LinearAxes("Switches", "Epoch", "Switch Frequency");
switchPlot.Series.AddRange(PlotHelpers.PlotTrajectoryScatters1D(switchData.Map2D(x => x / recorders.Switches.SamplePeriod).TakeLast(1).ToArray(), recorders.Switches.SamplePeriod, OxyColors.DeepPink, OxyColors.HotPink));
switchPlot.ExportPdf(path("switches_t.pdf"), 0.5, false);
}
public static Parameters Standard(double z)
{
return new Parameters()
{
DualPeakedEnvironmentalInstanceFrequency = z,
EpochStartOperation = Model.VaryEnvironment,
};
}
public static Parameters PerfectG(double z)
{
return new Parameters()
{
DualPeakedEnvironmentalInstanceFrequency = z,
EpochStartOperation = Model.VaryEnvironmentWithPerfectG,
};
}
public static Parameters FixedHigh
{
get
{
return new Parameters()
{
EpochStartOperation = Model.FixEnvironmentAndG,
};
}
}
}
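/// <summary>
/// Model parameters. The short aliases (MB, T, Z, λ, τ, CH, CL, C0, K) mirror the M4M command-line names quoted in the comment below.
/// </summary>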
public class Parameters
{
// should be equivalent to
// m4m gen=composedense bprob=0.5 BEx=true lambda=0.1 Z=0.95 regfunc=l1 ch=1.0 cl=0.7 c0=-1.0 epochs=120000 mb=1E-3 judgemode=Split K=1000 targetpackage=ivmc:4 transmatmutator=singlecell
public double BMutationRate { get; set; } = 0.001;
public double MB => BMutationRate;
public bool AllowGMutation { get; set; } = true;
public int NumberOfDevelopmentalTimeSteps { get; set; } = 10;
public int T => NumberOfDevelopmentalTimeSteps;
public double DualPeakedEnvironmentalInstanceFrequency { get; set; } = 0.95;
public double Z => DualPeakedEnvironmentalInstanceFrequency;
public double CostCoefficient { get; set; } = 0.1;
public double λ => CostCoefficient;
public double DecayRate { get; set; } = 0.2;
public double τ => DecayRate;
public Func<double, double> Squash { get; set; }
= x => Math.Tanh(x * 0.5);
public double HighPeakBenefit { get; set; } = 1.0;
public double CH => HighPeakBenefit;
public double LowPeakBenefit { get; set; } = 0.7;
public double CL => LowPeakBenefit;
public double NoPeakBenefit { get; set; } = -1;
public double C0 => NoPeakBenefit;
public int GenerationsPerEpoch { get; set; } = 1000;
public int K => GenerationsPerEpoch;
public int TotalEpochs { get; set; } = 120_000;
public EpochStartOperation EpochStartOperation { get; set; } = null;
}
public delegate void EpochStartOperation(ExecutionContext ctx, Parameters parameters, Individual individual, IvmcTarget target);
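/// <summary>
/// Per-run execution state: the RNG driving all stochastic choices plus a pool of reusable scratch vectors.
/// </summary>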
public class ExecutionContext
{
public ExecutionContext(Random rand)
{
Rand = rand ?? throw new ArgumentNullException(nameof(rand));
VectorPool = new Pool<int, Vector<double>>(CreateVector.Dense<double>);
}
public Random Rand { get; }
public Pool<int, Vector<double>> VectorPool { get; }
}
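/// <summary>
/// An individual: genotype vector G, regulatory matrix B, and the developed phenotype P (computed by DevelopInplace).
/// </summary>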
public class Individual
{
public Individual(Vector<double> g, Matrix<double> b)
{
if (g.Count != b.RowCount || b.RowCount != b.ColumnCount)
throw new ArgumentException("Mismatched dimensions");
G = g;
B = b;
P = CreateVector.Dense<double>(Size);
}
public Vector<double> G { get; }
public Matrix<double> B { get; }
public Vector<double> P { get; }
public int Size => G.Count;
public static Individual ZeroGenome(int size)
{
return new Individual(CreateVector.Dense<double>(size), CreateMatrix.Dense<double>(size, size));
}
public static void Mutate(ExecutionContext ctx, Parameters parameters, Individual parent, Individual offspring)
{
var rand = ctx.Rand;
if (parent.Size != offspring.Size)
throw new ArgumentException("Mismatched dimensions");
bool mutateB = rand.NextBool(0.5);
parent.B.CopyTo(offspring.B);
parent.G.CopyTo(offspring.G);
if (mutateB)
{
// pick a random cell
int r = rand.Next(parent.Size);
int c = rand.Next(parent.Size);
// draw delta
double delta = rand.NextSignedUniform(parameters.BMutationRate);
// additive update (unclamped)
offspring.B[r, c] = delta + offspring.B[r, c];
}
else if (parameters.AllowGMutation) // mutate G
{
// pick a random gene
int r = rand.Next(parent.Size);
// assign randomly (not a flip, but that should 'work' as well)
offspring.G[r] = rand.NextBool(0.5) ? 1 : -1;
}
offspring.DevelopInplace(ctx, parameters);
}
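// Development: starting from G, iterate state ← (1-τ)·state + squash(B·state) for T time steps,
// then scale by τ so the resulting phenotype P lies in [-1, +1].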
public void DevelopInplace(ExecutionContext ctx, Parameters parameters)
{
// fetch temporaries
using var _update = ctx.VectorPool.Borrow(Size, out var update);
using var _state = ctx.VectorPool.Borrow(Size, out var state);
G.CopyTo(state);
for (int t = 0; t < parameters.NumberOfDevelopmentalTimeSteps; t++)
{
// TODO: may need to refit this to produce exact replicates
//B.Multiply(state, update);
//state.Multiply(1 - parameters.DecayRate, state);
//update.MapInplace(parameters.Squash);
//state.Add(update, state);
// eh, doesn't seem to make any difference
B.Multiply(state, update);
double rt = 1 - parameters.DecayRate;
// fast path dense storage
if (state.Storage is MathNet.Numerics.LinearAlgebra.Storage.DenseVectorStorage<double> sdvs && update.Storage is MathNet.Numerics.LinearAlgebra.Storage.DenseVectorStorage<double> udvs)
{
var sdata = sdvs.Data;
var udata = udvs.Data;
for (int i = 0; i < sdata.Length; i++)
{
sdata[i] = sdata[i] * rt + parameters.Squash(udata[i]);
}
}
else
{
for (int i = 0; i < Size; i++)
{
state[i] = state[i] * rt + parameters.Squash(update[i]);
}
}
}
// scale by the decay rate to normalise the settled state into [-1, +1] (at a fixed point, τ·state = squash(B·state))
state.Multiply(parameters.DecayRate, P);
}
}
public struct Judgement
{
public Judgement(double fitness, double benefit, double cost)
{
Fitness = fitness;
Benefit = benefit;
Cost = cost;
}
public double Fitness { get; }
public double Benefit { get; }
public double Cost { get; }
}
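/// <summary>
/// The environmental target: ModuleCount modules of ModuleSize traits. PlusCoefs/MinusCoefs hold each module's benefit
/// for being expressed at +1 or -1; Randomise draws a fresh instance (dual-peaked with probability Z).
/// </summary>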
public class IvmcTarget
{
public IvmcTarget(int moduleSize, int moduleCount)
{
ModuleSize = moduleSize;
ModuleCount = moduleCount;
PlusCoefs = new double[ModuleCount];
MinusCoefs = new double[ModuleCount];
}
public int ModuleSize { get; }
public int ModuleCount { get; }
public int Size => ModuleCount * ModuleSize;
public double[] PlusCoefs { get; }
public double[] MinusCoefs { get; }
public void Randomise(ExecutionContext ctx, Parameters parameters)
{
var rand = ctx.Rand;
for (int i = 0; i < ModuleCount; i++)
{
bool dualPeaked = rand.NextBool(parameters.Z);
bool minusHigh = rand.NextBoolean();
PlusCoefs[i] = !minusHigh
? parameters.CH
: (dualPeaked ? parameters.CL : parameters.C0);
MinusCoefs[i] = minusHigh
? parameters.CH
: (dualPeaked ? parameters.CL : parameters.C0);
}
}
public double JudgeBenefit(Vector<double> phenotype)
{
double benefitAcc = 0;
for (int i = 0; i < ModuleCount * ModuleSize; i += ModuleSize)
{
double acc = 0;
for (int j = i; j < i + ModuleSize; j++)
acc += phenotype[j];
acc /= ModuleSize; // average
double moduleFitness = acc > 0
? acc * PlusCoefs[i / ModuleSize]
: -acc * MinusCoefs[i / ModuleSize];
benefitAcc += moduleFitness;
}
return benefitAcc / ModuleCount; // mean benefit over all modules
}
}
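/// <summary>
/// Core evolutionary loop: each epoch runs its EpochStartOperation (typically randomising the environment),
/// then hill-climbs the individual for K generations.
/// </summary>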
public static class Model
{
public static EpochStartOperation VaryEnvironment = (ExecutionContext ctx, Parameters parameters, Individual individual, IvmcTarget target) =>
{
target.Randomise(ctx, parameters);
};
public static EpochStartOperation FixEnvironmentAndG = (ExecutionContext ctx, Parameters parameters, Individual individual, IvmcTarget target) =>
{
for (int i = 0; i < target.ModuleCount; i++)
{
target.PlusCoefs[i] = parameters.HighPeakBenefit;
target.MinusCoefs[i] = parameters.LowPeakBenefit;
}
individual.G.Clear();
individual.G.Add(1, individual.G);
individual.DevelopInplace(ctx, parameters);
};
public static EpochStartOperation VaryEnvironmentWithPerfectG = (ExecutionContext ctx, Parameters parameters, Individual individual, IvmcTarget target) =>
{
VaryEnvironment(ctx, parameters, individual, target);
var perfectP = target.PerfectP();
perfectP.CopyTo(individual.G);
individual.DevelopInplace(ctx, parameters);
};
public static double JudgeCost(Matrix<double> grn)
{
double acc = 0.0;
for (int i = 0; i < grn.RowCount; i++)
{
for (int j = 0; j < grn.ColumnCount; j++)
{
acc += Math.Abs(grn[i, j]);
}
}
return acc / ((double)grn.ColumnCount * grn.RowCount);
}
public static Judgement Judge(ExecutionContext ctx, Parameters parameters, IvmcTarget target, Individual individual)
{
var benefit = target.JudgeBenefit(individual.P);
var cost = JudgeCost(individual.B);
var fitness = benefit - cost * parameters.CostCoefficient;
return new Judgement(fitness, benefit, cost);
}
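// One epoch: apply the epoch-start operation, then a (1+1) hill climb; a mutant replaces the parent only on a strict fitness improvement.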
public static Judgement RunEpoch(int epoch, ExecutionContext ctx, Parameters parameters, IvmcTarget target, ref Individual individual)
{
parameters.EpochStartOperation(ctx, parameters, individual, target);
// reset check (compat)
ctx.Rand.NextDouble();
var judgement = Judge(ctx, parameters, target, individual);
var candidate = Individual.ZeroGenome(individual.Size);
for (int generation = 0; generation < parameters.GenerationsPerEpoch; generation++)
{
Individual.Mutate(ctx, parameters, individual, candidate);
var candidateJudgement = Judge(ctx, parameters, target, candidate);
if (candidateJudgement.Fitness > judgement.Fitness)
{
// swap
(individual, candidate) = (candidate, individual);
judgement = candidateJudgement;
}
}
return judgement;
}
}
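/// <summary>
/// Structural measures on the regulation matrix: per-module "huskyness" (plotted as degree of hierarchy) and module independence.
/// </summary>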
public static class Analysis
{
public static Vector<double> PerfectP(this IvmcTarget target)
{
var p = CreateVector.Dense<double>(target.Size);
for (int i = 0; i < target.ModuleCount; i++)
{
var val = target.PlusCoefs[i] > target.MinusCoefs[i] ? 1 : -1;
for (int j = 0; j < target.ModuleSize; j++)
{
p[i * target.ModuleSize + j] = val;
}
}
return p;
}
/// <summary>
/// Computes the huskyness of sequential block modules of the given size
/// </summary>
public static IEnumerable<double> ComputeBlockModuleHuskynesses(Matrix<double> dtm, int moduleSize)
{
for (int i = 0; i < dtm.ColumnCount; i += moduleSize)
{
yield return ComputeModuleHuskyness(dtm, Enumerable.Range(i, moduleSize).ToList());
}
}
/// <summary>
/// Computes the module independence of sequential block modules of the given size
/// </summary>
public static IEnumerable<double> ComputeBlockModuleIndependence(Matrix<double> dtm, int moduleSize)
{
for (int i = 0; i < dtm.ColumnCount; i += moduleSize)
{
yield return ComputeModuleIndependence(dtm, Enumerable.Range(i, moduleSize).ToList());
}
}
/// <summary>
/// Computes the huskyness of a module containing 2 or more traits
/// huskyness = (max_col_weight / total_weight), scaled between 0 (dense/nothing) and 1 (ideal husky)
/// Returns 0 if there are no weights
/// </summary>
public static double ComputeModuleHuskyness(Matrix<double> dtm, IReadOnlyList<int> moduleTraits)
{
double[] columnSums = moduleTraits.Select(j => moduleTraits.Sum(i => Math.Abs(dtm[i, j]))).ToArray();
int count = columnSums.Length;
if (count < 2)
throw new ArgumentException("A module must comprise 2 or more traits", nameof(moduleTraits));
double max = columnSums.Max();
if (max == 0) // avoid div0: this is not a husky
return 0.0;
double total = columnSums.Sum();
double hratio = max / total;
if (double.IsNaN(hratio) || double.IsInfinity(hratio))
return double.NaN; // probably ought to throw...
// re-scale hratio from [1/n, 1] to [0, 1]
int n = count;
return (hratio - (1.0 / n)) / (1.0 - (1.0 / n));
}
/// <summary>
/// Computes the independence of a module
/// module_independence = (weight_within_module / weight_in_module_row), on scale between 0 (no control) and 1 (total independence)
/// Returns NaN if there are no weights
/// </summary>
public static double ComputeModuleIndependence(Matrix<double> dtm, IReadOnlyList<int> moduleTraits)
{
double withinSum = 0.0;
double rowSum = 0.0;
foreach (var i in moduleTraits)
{
foreach (var j in moduleTraits)
{
withinSum += Math.Abs(dtm[i, j]);
}
for (int j = 0; j < dtm.ColumnCount; j++)
{
rowSum += Math.Abs(dtm[i, j]);
}
}
var iratio = withinSum / rowSum;
if (double.IsNaN(iratio) || double.IsInfinity(iratio))
return double.NaN;
return iratio;
}
}
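/// <summary>
/// Generic periodic sampler: every SamplePeriod epochs it stores whatever Sampler extracts from the current state.
/// </summary>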
public class Recorder<T>
{
public Recorder(Func<IvmcTarget, Individual, Judgement, T> sampler, int samplePeriod)
{
Sampler = sampler ?? throw new ArgumentNullException(nameof(sampler));
SamplePeriod = samplePeriod;
}
public Func<IvmcTarget, Individual, Judgement, T> Sampler { get; }
public int SamplePeriod { get; }
public List<T> Samples { get; } = new List<T>();
public void Sample(IvmcTarget target, Individual individual, Judgement judgement, int epoch)
{
if (epoch % SamplePeriod == 0)
Samples.Add(Sampler(target, individual, judgement));
}
}
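/// <summary>
/// Accumulates per-epoch counts and flushes the accumulated totals into Samples every SamplePeriod epochs.
/// </summary>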
public class CountRecorder
{
public CountRecorder(int sampleSize, Action<IvmcTarget, Individual, Judgement, int[]> counter, int samplePeriod)
{
Counter = counter;
SamplePeriod = samplePeriod;
SampleSize = sampleSize;
}
public Action<IvmcTarget, Individual, Judgement, int[]> Counter { get; }
public int SampleSize { get; }
public int SamplePeriod { get; }
private int[] CurrentCount = null;
public List<int[]> Samples { get; } = new List<int[]>();
public void Sample(IvmcTarget target, Individual individual, Judgement judgement, int epoch)
{
if (CurrentCount == null)
{
CurrentCount = new int[SampleSize];
}
Counter(target, individual, judgement, CurrentCount);
if (epoch % SamplePeriod == 0)
{
Samples.Add(CurrentCount);
CurrentCount = new int[SampleSize];
}
}
}
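/// <summary>
/// Bundles the recorders for a run: judgements every epoch, plus periodic GRN snapshots, phenotypes, solve counts and switch counts.
/// </summary>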
public class Recorders
{
public Recorders(int moduleCount, int moduleSize)
{
Solves = new CountRecorder(moduleCount + 1, CountSolves, OtherSamplePeriod);
Switches = new CountRecorder(moduleCount + 1, CountSwitches(), OtherSamplePeriod);
}
private static Action<IvmcTarget, Individual, Judgement, int[]> CountSwitches()
{
Vector<double> last = null;
void count(IvmcTarget target, Individual individual, Judgement judgement, int[] switches)
{
if (last == null)
{
last = individual.P.Clone();
return;
}
for (int i = 0; i < target.ModuleCount; i++)
{
var val = target.PlusCoefs[i] > target.MinusCoefs[i] ? 1 : -1;
bool isHard = target.PlusCoefs[i] >= 0 && target.MinusCoefs[i] >= 0;
bool allOk = true;
for (int j = 0; j < target.ModuleSize; j++)
{
int k = i * target.ModuleSize + j; // index of trait j within module i
bool isSwitch = Math.Sign(individual.P[k]) == -Math.Sign(last[k]);
// NOTE: this doesn't check if we were correct (per M4M)
if (!isHard || !isSwitch)
{
allOk = false;
break;
}
}
if (allOk)
{
switches[i]++;
switches[target.ModuleCount]++;
}
}
individual.P.CopyTo(last);
}
return count;
}
private static void CountSolves(IvmcTarget target, Individual individual, Judgement judgement, int[] wins)
{
if (target == null)
return; // pre-start
bool allAllOk = true;
for (int i = 0; i < target.ModuleCount; i++)
{
var val = target.PlusCoefs[i] > target.MinusCoefs[i] ? 1 : -1;
bool allOk = true;
for (int j = 0; j < target.ModuleSize; j++)
{
if (Math.Sign(individual.P[i * target.ModuleSize + j]) != val)
{
allOk = false;
break;
}
}
if (allOk)
wins[i]++;
else
allAllOk = false;
}
if (allAllOk)
wins[target.ModuleCount]++;
}
private const int GrnSamplePeriod = 100;
private const int JudgementSamplePeriod = 1;
private const int OtherSamplePeriod = 100;
public Recorder<Judgement> Judgements { get; } = new Recorder<Judgement>((t, i, j) => j, JudgementSamplePeriod);
public Recorder<Matrix<double>> Grns { get; } = new Recorder<Matrix<double>>((t, i, j) => i.B.Clone(), GrnSamplePeriod);
public Recorder<Vector<double>> Ps { get; } = new Recorder<Vector<double>>((t, i, j) => i.P.Clone(), OtherSamplePeriod);
public CountRecorder Solves { get; }
public CountRecorder Switches { get; }
public void Sample(IvmcTarget target, Individual individual, Judgement judgement, int epoch)
{
Judgements.Sample(target, individual, judgement, epoch);
Grns.Sample(target, individual, judgement, epoch);
Ps.Sample(target, individual, judgement, epoch);
Solves.Sample(target, individual, judgement, epoch);
Switches.Sample(target, individual, judgement, epoch);
}
public double[][] GetFitnessData()
{
return new[] {
Judgements.Samples.Select(j => j.Fitness).ToArray(),
Judgements.Samples.Select(j => j.Benefit).ToArray(),
Judgements.Samples.Select(j => j.Fitness - j.Benefit).ToArray() // -cost * λ
};
}
public int[][] GetSolveData()
{
return Solves.Samples.Transpose();
}
public int[][] GetSwitchData()
{
return Switches.Samples.Transpose();
}
public double[][] GetRcsDataFlat()
{
var N = Grns.Samples[0].RowCount;
return Enumerable.Range(0, N * N).Select(i => Grns.Samples.Select(grn => grn[i / N, i % N]).ToArray()).ToArray();
}
public double[][][] GetRcsData()
{
var N = Grns.Samples[0].RowCount;
return Enumerable.Range(0, N).Select(i => Enumerable.Range(0, N).Select(j => Grns.Samples.Select(grn => grn[i, j]).ToArray()).ToArray()).ToArray();
}
}
// C&P'd from M4M
public static class TrajectoryHelpers
{
public static TOut[][] Map2D<TIn, TOut>(this TIn[][] arr, Func<TIn, TOut> map)
{
return arr.Select(a => a.Select(map).ToArray()).ToArray();
}
public static T[][] Transpose<T>(this IReadOnlyList<IReadOnlyList<T>> samples)
{
var l = samples.Count;
var n = samples[0].Count;
var res = new T[n][];
for (int i = 0; i < n; i++)
res[i] = new T[l];
for (int i = 0; i < l; i++)
for (int j = 0; j < n; j++)
res[j][i] = samples[i][j];
return res;
}
public static void SaveTrajectories(String filename, double[][] trajectories, int samplePeriod)
{
using (System.IO.FileStream fs = System.IO.File.Open(filename, System.IO.FileMode.Create))
{
fs.WriteInt32(trajectories.Length); // length
fs.WriteInt32(trajectories[0].Length); // width
for (int i = 0; i < trajectories.Length; i++)
{
double[] curr = trajectories[i];
for (int j = 0; j < curr.Length; j++)
{
fs.WriteFloat64(curr[j]);
}
}
if (samplePeriod >= 0)
{
fs.WriteInt32(-1);
fs.WriteInt32(samplePeriod);
}
}
}
public static double[][] LoadTrajectories(String filename, out int samplePeriod)
{
using (System.IO.FileStream fs = System.IO.File.Open(filename, System.IO.FileMode.Open))
{
int length = fs.ReadInt32();
int width = fs.ReadInt32();
double[][] trajectories = new double[length][];
for (int i = 0; i < trajectories.Length; i++)
{
double[] curr = trajectories[i] = new double[width];
for (int j = 0; j < curr.Length; j++)
{
curr[j] = fs.ReadFloat64();
}
}
if (fs.Position < fs.Length)
{ // there is more
int signaller = fs.ReadInt32();
samplePeriod = fs.ReadInt32();
}
else
{
samplePeriod = -1;
}
return trajectories;
}
}
}
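// OxyPlot helpers: axis/plot construction, PDF export at an A4-derived size, and series builders for matrices and trajectories.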
public static class PlotHelpers
{
public static void AddRange<T>(this ElementCollection<T> col, IEnumerable<T> things)
where T : PlotElement
{
foreach (var thing in things)
col.Add(thing);
}
public static void ExportPdf(this PlotModel plot, string filename, double size, bool forceAspect)
{
PdfExporter exporter = new PdfExporter();
plot.ResetAllAxes();
plot.InvalidatePlot(true);
((IPlotModel)plot).Update(true);
double width = 297 / 25.4 * 72 * size; // A4
double height = 210 / 25.4 * 72 * size;
exporter.Width = width;
exporter.Height = height;
void doPlot()
{
plot.ResetAllAxes();
plot.InvalidatePlot(true);
using (var fs = new System.IO.FileStream(filename, FileMode.Create))
{
exporter.Export(plot, fs);
}
}
void doForceAspect()
{
if (plot.PlotArea.Width > plot.PlotArea.Height)
{
width = width - plot.PlotArea.Width + plot.PlotArea.Height;
}
else
{
height = height + plot.PlotArea.Width - plot.PlotArea.Height;
}
exporter.Width = width;
exporter.Height = height;
}
doPlot();
if (forceAspect)
{
doForceAspect();
doPlot();
}
}
public static LinearColorAxis Gray()
{
return new LinearColorAxis() { Position = AxisPosition.Right, Palette = OxyPalettes.Gray(100) };
}
public static PlotModel IntegralCartesianAxes(string title, string x, string y)
{
var plot = new PlotModel() { Title = title, PlotType = PlotType.Cartesian };
var xaxis = new LinearAxis() { Key = "x", Title = x, Position = AxisPosition.Bottom, MinimumMinorStep = 1, MinimumMajorStep = 1 };
var yaxis = new LinearAxis() { Key = "y", Title = y, Position = AxisPosition.Left, MinimumMinorStep = 1, MinimumMajorStep = 1 };
plot.Axes.Add(xaxis);
plot.Axes.Add(yaxis);
return plot;
}
public static PlotModel LinearAxes(string title, string x, string y)
{
var plot = new PlotModel() { Title = title };
var xaxis = new LinearAxis() { Key = "x", Title = x, Position = AxisPosition.Bottom };
var yaxis = new LinearAxis() { Key = "y", Title = y, Position = AxisPosition.Left };
plot.Axes.Add(xaxis);
plot.Axes.Add(yaxis);
return plot;
}
public static HeatMapSeries PlotMatrix(Matrix<double> matrix)
{
double[,] darr = matrix.Transpose().ToArray();
var hms = new HeatMapSeries()
{
Interpolate = false,
Data = darr,
X0 = 0,
X1 = matrix.ColumnCount - 1,
Y0 = 0,
Y1 = matrix.ColumnCount - 1,
};
return hms;
}
public static LineSeries[] PlotTrajectoryLines1D(double[][] trajectories, int samplePeriod, OxyColor c0, OxyColor c1)
{
var lines = new LineSeries[trajectories.Length];
for (int i = 0; i < trajectories.Length; i++)
{
var ls = lines[i] = new LineSeries()
{
Color = OxyColor.Interpolate(c0, c1, i / (double)Math.Max(1, trajectories.Length - 1)),
};
ls.Points.AddRange(trajectories[i].Select((s, x) => new DataPoint(x * samplePeriod, s)));
}
return lines;
}
public static ScatterSeries[] PlotTrajectoryScatters1D(double[][] trajectories, int samplePeriod, OxyColor c0, OxyColor c1)
{
var scatters = new ScatterSeries[trajectories.Length];
for (int i = 0; i < trajectories.Length; i++)
{
var ss = scatters[i] = new ScatterSeries()
{
MarkerFill = OxyColor.Interpolate(c0, c1, i / (double)Math.Max(1, trajectories.Length - 1)),
MarkerType = MarkerType.Diamond,
MarkerSize = 2,
};
ss.Points.AddRange(trajectories[i].Select((s, x) => new ScatterPoint(x * samplePeriod, s)));
}
return scatters;
}
public static LineSeries[] PlotTrajectoryLines2D(double[][][] trajectories, int samplePeriod, OxyColor c00, OxyColor c10, OxyColor c01, OxyColor c11)
{
var lines = new List<LineSeries>();
for (int i = 0; i < trajectories.Length; i++)
{
for (int j = 0; j < trajectories[i].Length; j++)
{
var iz = (double)i / Math.Max(1, trajectories.Length - 1);
var jz = (double)j / Math.Max(1, trajectories[i].Length - 1);
var ls = new LineSeries()
{
Color = OxyColor.Interpolate(OxyColor.Interpolate(c00, c10, iz), OxyColor.Interpolate(c01, c11, iz), jz),
};
lines.Add(ls);
ls.Points.AddRange(trajectories[i][j].Select((s, x) => new DataPoint(x * samplePeriod, s)));
}
}
return lines.ToArray();
}
}
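// Trajectory downsampling helpers: plain every-Nth sampling and mean-over-window reduction.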
public static class SamplingExtensions
{
public static double[][] DownsampleTrajectories(this IReadOnlyList<IReadOnlyList<double>> trajectories, int samplePeriod)
{
return trajectories.Select(t => t == null ? null : Downsample(t, samplePeriod).ToArray()).ToArray();
}
public static IEnumerable<T> Downsample<T>(this IEnumerable<T> sequence, int samplePeriod)
{
int c = 0;
foreach (var thing in sequence)
{
if (c == 0)
{
yield return thing;
c = samplePeriod;
}
c--;
}
}
public static double[][] MeanDownsampleTrajectories(this IReadOnlyList<IReadOnlyList<double>> trajectories, int resolution)
{
return trajectories.Select(t => t == null ? null : MeanDownsample(t, resolution)).ToArray();
}
public static double[] MeanDownsample(IReadOnlyList<double> trajectory, int resolution)
{
double[] res = new double[trajectory.Count / resolution];
for (int i = 0; i < res.Length; i++)
{
double total = 0.0;
int c = 0;
for (int j = i * resolution; j < i * resolution + resolution; j++)
{
if (j >= 0 && j < trajectory.Count)
{
total += trajectory[j];
c++;
}
}
res[i] = total / c;
}
return res;
}
}
/// <summary>
/// Implements big-endian (ish) primitive operations for compatibility purposes.
/// </summary>
/// <remarks>
/// Compatible with netstate, but simplified implementation.</remarks>
public static class StreamExtensions
{
/// <summary>
/// Modifies input.
/// </summary>
private static void WriteEndianedBytes(this Stream s, byte[] barr)
{
if (BitConverter.IsLittleEndian)
Array.Reverse(barr);
s.Write(barr);
}
private static byte[] ReadEndianedBytes(this Stream s, int count)
{
var barr = new byte[count];
s.Read(barr);
if (BitConverter.IsLittleEndian)
Array.Reverse(barr);
return barr;
}
public static void WriteInt32(this Stream s, int i)
{
s.WriteEndianedBytes(BitConverter.GetBytes(i));
}
public static void WriteFloat64(this Stream s, double d)
{
// not sure if doubles have an endianness... but they do now
s.WriteEndianedBytes(BitConverter.GetBytes(d));
}
public static int ReadInt32(this Stream s)
{
return BitConverter.ToInt32(s.ReadEndianedBytes(sizeof(int)));
}
public static double ReadFloat64(this Stream s)
{
// not sure if doubles have an endianness... but they do now
return BitConverter.ToDouble(s.ReadEndianedBytes(sizeof(double)));
}
}
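// RNG convenience extensions: symmetric uniform draw in [-maxAbs, +maxAbs] and a biased coin flip.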
public static class RandomExtensions
{
public static double NextSignedUniform(this Random rand, double maxAbs)
{
return (rand.NextDouble() * 2.0 - 1.0) * maxAbs;
}
public static bool NextBool(this Random rand, double probability)
{
return rand.NextDouble() < probability;
}
}
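/// <summary>
/// Minimal object pool keyed by a parameter (here vector length); disposing the Borrowed token returns the value to the pool.
/// </summary>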
public class Pool<TParams, TResult> where TParams : IEquatable<TParams>
{
public Pool(Func<TParams, TResult> factory)
{
Factory = factory ?? throw new ArgumentNullException(nameof(factory));
}
private Func<TParams, TResult> Factory { get; }
private ConcurrentDictionary<TParams, ConcurrentBag<TResult>> Table { get; }
= new ConcurrentDictionary<TParams, ConcurrentBag<TResult>>();
private ConcurrentBag<TResult> FetchBag(TParams p)
{
return Table.GetOrAdd(p, _ => new ConcurrentBag<TResult>());
}
public Borrowed<TParams, TResult> Borrow(TParams p, out TResult res)
{
var bag = FetchBag(p);
if (!bag.TryTake(out res))
res = Factory(p);
return new Borrowed<TParams, TResult>(this, p, res);
}
public void Release(Borrowed<TParams, TResult> borrowed)
{
FetchBag(borrowed.Param).Add(borrowed.Value);
}
}
public class Borrowed<TParams, TResult> : IDisposable where TParams : IEquatable<TParams>
{
public Borrowed(Pool<TParams, TResult> pool, TParams param, TResult value)
{
Pool = pool ?? throw new ArgumentNullException(nameof(pool));
Param = param;
Value = value;
}
private Pool<TParams, TResult> Pool;
public TParams Param { get; private set; }
public TResult Value { get; private set; }
public void Dispose()
{
Pool?.Release(this);
Pool = null;
Param = default(TParams);
Value = default(TResult);
}
}
}