Overall Statistics

Total Orders: 409
Average Win: 0.00%
Average Loss: 0.00%
Compounding Annual Return: 12.110%
Drawdown: 17.400%
Expectancy: 0.273
Start Equity: 1000000
End Equity: 1090172.32
Net Profit: 9.017%
Sharpe Ratio: 0.257
Sortino Ratio: 0.277
Probabilistic Sharpe Ratio: 32.541%
Loss Rate: 33%
Win Rate: 67%
Profit-Loss Ratio: 0.91
Alpha: -0.015
Beta: 0.878
Annual Standard Deviation: 0.159
Annual Variance: 0.025
Information Ratio: -0.899
Tracking Error: 0.025
Treynor Ratio: 0.047
Total Fees: $415.40
Estimated Strategy Capacity: $790000000.00
Lowest Capacity Asset: IWM RV0PWMLXVHPH
Portfolio Turnover: 0.42%
Drawdown Recovery: 126

using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect;
using QuantConnect.Algorithm;
using QuantConnect.Data;
using QuantConnect.Indicators;
using QuantConnect.Orders;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;

namespace QuantConnect.Algorithm.CSharp
{
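    // Fixed-length rolling window of price observations: one row per sample,
    // one column per symbol; the oldest row is dropped once maxLen rows are stored.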
    public class MovingFrame
    {
        private int maxLen;
        private List<Symbol> symbols;
        private List<double[]> rows;

        public MovingFrame(int maxLength, List<Symbol> symbolList)
        {
            maxLen = maxLength;
            symbols = symbolList;
            rows = new List<double[]>();
        }

        public void AddRow(double[] newData)
        {
            rows.Add(newData);
            if (rows.Count > maxLen)
            {
                rows.RemoveAt(0);
            }
        }

        public double[,] GetMatrix()
        {
            int numRows = rows.Count;
            int numCols = symbols.Count;
            double[,] result = new double[numRows, numCols];
            
            for (int i = 0; i < numRows; i++)
            {
                for (int j = 0; j < numCols; j++)
                {
                    result[i, j] = rows[i][j];
                }
            }
            return result;
        }

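        // Per-column log returns of the stored price rows; the first row is zero
        // because it has no prior observation.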
        public double[,] GetLogReturns()
        {
            var matrix = GetMatrix();
            int numRows = matrix.GetLength(0);
            int numCols = matrix.GetLength(1);
            double[,] logReturns = new double[numRows, numCols];

            for (int j = 0; j < numCols; j++)
            {
                logReturns[0, j] = 0.0;
                for (int i = 1; i < numRows; i++)
                {
                    if (matrix[i - 1, j] > 0 && matrix[i, j] > 0)
                    {
                        // Log return: ln(P_t / P_{t-1}).
                        logReturns[i, j] = Math.Log(matrix[i, j] / matrix[i - 1, j]);
                    }
                    else
                    {
                        logReturns[i, j] = 0.0;
                    }
                }
            }
            return logReturns;
        }

        public int Count => rows.Count;
    }

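    // Squared-exponential (RBF) kernel on the first feature column:
    // k(x, x') = exp(-(x - x')^2 / (2 * lengthScale^2)).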
    public class RBFKernel
    {
        private double lengthScale;

        public RBFKernel(double lengthScaleValue = 20.0)
        {
            lengthScale = lengthScaleValue;
        }

        public double[,] ComputeKernel(double[,] X1, double[,] X2)
        {
            int n1 = X1.GetLength(0);
            int n2 = X2.GetLength(0);
            double[,] K = new double[n1, n2];

            for (int i = 0; i < n1; i++)
            {
                for (int j = 0; j < n2; j++)
                {
                    double diff = X1[i, 0] - X2[j, 0];
                    K[i, j] = Math.Exp(-diff * diff / (2.0 * lengthScale * lengthScale));
                }
            }
            return K;
        }

        public double[] ComputeDiag(double[,] X)
        {
            int n = X.GetLength(0);
            double[] diag = new double[n];
            for (int i = 0; i < n; i++)
            {
                diag[i] = 1.0;
            }
            return diag;
        }
    }

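    // Constant * RBF kernel with a white-noise level added to the diagonal of
    // square (training) kernel matrices.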
    public class CompositeKernel
    {
        private double constantValue;
        private RBFKernel rbfKernel;
        private double whiteNoiseLevel;

        public CompositeKernel(double constant = 1.0, double lengthScale = 20.0, double noiseLevel = 1e-4)
        {
            constantValue = constant;
            rbfKernel = new RBFKernel(lengthScale);
            whiteNoiseLevel = noiseLevel;
        }

        public double[,] ComputeKernel(double[,] X1, double[,] X2, bool addNoise = false)
        {
            var K_rbf = rbfKernel.ComputeKernel(X1, X2);
            int n1 = K_rbf.GetLength(0);
            int n2 = K_rbf.GetLength(1);
            double[,] K = new double[n1, n2];

            for (int i = 0; i < n1; i++)
            {
                for (int j = 0; j < n2; j++)
                {
                    K[i, j] = constantValue * K_rbf[i, j];
                    if (addNoise && i == j && n1 == n2)
                    {
                        K[i, j] += whiteNoiseLevel;
                    }
                }
            }
            return K;
        }

        public double[] ComputeDiag(double[,] X)
        {
            int n = X.GetLength(0);
            double[] diag = new double[n];
            for (int i = 0; i < n; i++)
            {
                diag[i] = constantValue + whiteNoiseLevel;
            }
            return diag;
        }
    }

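    // Minimal Gaussian process regressor: factorizes the (jittered) training
    // kernel with a Cholesky decomposition, solves alpha = K^-1 * y, and returns
    // predictive means and standard deviations for new inputs. Targets are
    // optionally standardized before fitting and predictions rescaled afterwards.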
    public class GaussianProcessRegressor
    {
        private CompositeKernel kernel;
        private double alpha;
        private bool normalizeY;
        private double[,] XTrain;
        private double[] yTrain;
        private Matrix<double> L;
        private Vector<double> alphaVector;
        private double yTrainMean;
        private double yTrainStd;

        public GaussianProcessRegressor(CompositeKernel kernelInstance, double alphaValue = 1e-8, bool normalize = true)
        {
            kernel = kernelInstance;
            alpha = alphaValue;
            normalizeY = normalize;
        }

        public void Fit(double[,] X, double[] y)
        {
            XTrain = X;
            int n = y.Length;

            if (normalizeY)
            {
                yTrainMean = y.Average();
                double variance = y.Select(v => Math.Pow(v - yTrainMean, 2)).Average();
                yTrainStd = Math.Sqrt(variance);
                if (yTrainStd < 1e-10) yTrainStd = 1.0;

                yTrain = new double[n];
                for (int i = 0; i < n; i++)
                {
                    yTrain[i] = (y[i] - yTrainMean) / yTrainStd;
                }
            }
            else
            {
                yTrain = y;
                yTrainMean = 0.0;
                yTrainStd = 1.0;
            }

            var K = kernel.ComputeKernel(XTrain, XTrain, true);
            var KMatrix = DenseMatrix.OfArray(K);

            for (int i = 0; i < n; i++)
            {
                KMatrix[i, i] += alpha;
            }

            try
            {
                L = KMatrix.Cholesky().Factor;
            }
            catch
            {
                for (int i = 0; i < n; i++)
                {
                    KMatrix[i, i] += 1e-6;
                }
                L = KMatrix.Cholesky().Factor;
            }

            var yVector = DenseVector.OfArray(yTrain);
            // Solve K * alpha = y using the Cholesky factor (K = L * L^T):
            // forward-solve L * z = y, then back-solve L^T * alpha = z.
            var z = L.Solve(yVector);
            alphaVector = L.Transpose().Solve(z);
        }

        public (double[] means, double[] stds) Predict(double[,] X)
        {
            int nTest = X.GetLength(0);
            var KTrans = kernel.ComputeKernel(X, XTrain, false);
            var KTransMatrix = DenseMatrix.OfArray(KTrans);

            var yMeanVector = KTransMatrix.Multiply(alphaVector);

            double[] means = new double[nTest];
            for (int i = 0; i < nTest; i++)
            {
                means[i] = yMeanVector[i] * yTrainStd + yTrainMean;
            }

            // Predictive variance: k(x*, x*) - ||L^-1 * K(X_train, x*)||^2.
            // Solve L * V = K_trans^T so that V = L^-1 * K_trans^T.
            var V = L.Solve(KTransMatrix.Transpose());
            var VTV = V.TransposeThisAndMultiply(V);

            var kernelDiag = kernel.ComputeDiag(X);
            double[] stds = new double[nTest];

            for (int i = 0; i < nTest; i++)
            {
                double variance = kernelDiag[i] - VTV[i, i];
                if (variance < 0) variance = 0;
                variance *= (yTrainStd * yTrainStd);
                stds[i] = Math.Sqrt(variance);
            }

            return (means, stds);
        }
    }

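    // Fits an independent Gaussian process per column (asset) against the integer
    // time index and forecasts the next forecastSteps values, returning matrices
    // of predictive means and standard deviations.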
    public class GPRForecast
    {
        public static (double[,] means, double[,] stds) FitAndForecast(double[,] Y, int forecastSteps)
        {
            int m = Y.GetLength(0);
            int n = Y.GetLength(1);

            double[,] XObs = new double[m, 1];
            for (int i = 0; i < m; i++)
            {
                XObs[i, 0] = i;
            }

            double step = 1.0;
            double tLast = m - 1;
            double[,] XFut = new double[forecastSteps, 1];
            for (int i = 0; i < forecastSteps; i++)
            {
                XFut[i, 0] = tLast + step * (i + 1);
            }

            double[,] means = new double[forecastSteps, n];
            double[,] stds = new double[forecastSteps, n];

            for (int j = 0; j < n; j++)
            {
                double[] yCol = new double[m];
                for (int i = 0; i < m; i++)
                {
                    yCol[i] = Y[i, j];
                }

                var kernel = new CompositeKernel(constant: 1.0, lengthScale: 20.0, noiseLevel: 1e-4);
                var gpr = new GaussianProcessRegressor(kernel, alphaValue: 1e-8, normalize: true);
                gpr.Fit(XObs, yCol);

                var (predMeans, predStds) = gpr.Predict(XFut);

                for (int i = 0; i < forecastSteps; i++)
                {
                    means[i, j] = predMeans[i];
                    stds[i, j] = predStds[i];
                }
            }

            return (means, stds);
        }
    }

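    // Random-search portfolio optimizer: samples nPortfolios weight vectors (plus
    // a cash slice up to maxCash), normalizes them to sum to one, and keeps the
    // allocation with the highest ratio of forecast return to forecast volatility
    // under a diagonal covariance built from the forecast standard deviations.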
    public class PortfolioOptimizer
    {
        public static (double[] weights, double cash) OptimizePortfolio(
            double[,] mu, 
            double[,] sigma, 
            int nPortfolios = 10000, 
            double maxCash = 0.3, 
            bool allowShort = false, 
            int randomSeed = 42)
        {
            int n = mu.GetLength(1);
            double[] muFlat = new double[n];
            double[] sigmaFlat = new double[n];

            for (int i = 0; i < n; i++)
            {
                muFlat[i] = mu[0, i];
                sigmaFlat[i] = sigma[0, i];
            }

            double[,] cov = new double[n, n];
            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    if (i == j)
                    {
                        cov[i, j] = sigmaFlat[i] * sigmaFlat[i];
                    }
                    else
                    {
                        cov[i, j] = 0.0;
                    }
                }
            }

            Random rand = new Random(randomSeed);
            double bestSharpe = double.NegativeInfinity;
            double[] bestWeights = new double[n];
            double bestCash = 0.0;

            for (int iter = 0; iter < nPortfolios; iter++)
            {
                double[] weights = new double[n];
                for (int i = 0; i < n; i++)
                {
                    weights[i] = allowShort ? (rand.NextDouble() * 2.0 - 1.0) : rand.NextDouble();
                }

                double cash = rand.NextDouble() * maxCash;
                double total = weights.Sum() + cash;

                for (int i = 0; i < n; i++)
                {
                    weights[i] /= total;
                }
                cash /= total;

                double portfolioReturn = 0.0;
                for (int i = 0; i < n; i++)
                {
                    portfolioReturn += muFlat[i] * weights[i];
                }

                double portfolioVariance = 0.0;
                for (int i = 0; i < n; i++)
                {
                    for (int j = 0; j < n; j++)
                    {
                        portfolioVariance += weights[i] * cov[i, j] * weights[j];
                    }
                }

                double portfolioStdDev = Math.Sqrt(portfolioVariance);
                double sharpeRatio = portfolioStdDev > 0 ? portfolioReturn / portfolioStdDev : 0.0;

                if (sharpeRatio > bestSharpe)
                {
                    bestSharpe = sharpeRatio;
                    bestWeights = (double[])weights.Clone();
                    bestCash = cash;
                }
            }

            return (bestWeights, bestCash);
        }
    }

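    // Rebalances a small ETF universe (SPY, QQQ, IWM) once per day before the
    // close, using GP forecasts of next-step log returns to pick target weights
    // and recording a volatility-scaled stop level for each entry.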
    public class LongTermMomentumAlgorithm : QCAlgorithm
    {
        private int lookback = 41;
        private int warmup = 14;
        private double stopLossScale = 1.5;

        private List<string> etfs;
        private List<Symbol> symbols;
        private MovingFrame frame;
        private Dictionary<Symbol, decimal?> stopLossPrice;

        public override void Initialize()
        {
            SetStartDate(2013, 1, 1);
            SetEndDate(2014, 1, 1);
            SetCash(100000);

            etfs = new List<string> { "SPY", "QQQ", "IWM" };
            symbols = new List<Symbol>();

            foreach (var etf in etfs)
            {
                symbols.Add(AddEquity(etf, Resolution.Hour).Symbol);
            }

            frame = new MovingFrame(lookback, symbols);
            SetWarmUp(warmup);

            stopLossPrice = new Dictionary<Symbol, decimal?>();
            foreach (var sym in symbols)
            {
                stopLossPrice[sym] = null;
            }

            Schedule.On(
                DateRules.EveryDay(symbols[0]),
                TimeRules.BeforeMarketClose(symbols[0], 10),
                Rebalance
            );
        }

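        // Intraday stop-loss check: when a price trades below its recorded stop
        // level the event is logged and the stop is cleared; position sizes are
        // only changed at the daily rebalance.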
        public override void OnData(Slice data)
        {
            foreach (var sym in symbols)
            {
                if (!stopLossPrice.ContainsKey(sym) || stopLossPrice[sym] == null)
                    continue;

                if (Securities[sym].Price < stopLossPrice[sym])
                {
                    Debug($"Stop-loss hit at {Securities[sym].Price}");
                    stopLossPrice[sym] = null;
                }
            }
        }

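        // Scheduled daily before the close: record closing prices, forecast
        // next-step returns, optimize target weights, and trade toward them.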
        private void Rebalance()
        {
            double[] prices = new double[symbols.Count];
            for (int i = 0; i < symbols.Count; i++)
            {
                prices[i] = (double)Securities[symbols[i]].Close;
            }
            frame.AddRow(prices);

            if (IsWarmingUp)
                return;

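            // Forecast the next-step mean and standard deviation of each ETF's
            // log return, then search for the weights with the best forecast Sharpe.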
            var logReturns = frame.GetLogReturns();
            var (means, stds) = GPRForecast.FitAndForecast(logReturns, 1);

            var (wOpt, cash) = PortfolioOptimizer.OptimizePortfolio(
                means,
                stds,
                nPortfolios: 10000,
                allowShort: false
            );

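            // Compute current dollar holdings and the dollar difference to the
            // target weight for each ETF.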
            double[] portfolioHoldingsValues = new double[symbols.Count];
            for (int i = 0; i < symbols.Count; i++)
            {
                portfolioHoldingsValues[i] = (double)(Portfolio[symbols[i]].Quantity * Securities[symbols[i]].Close);
            }

            double[] assetDifference = new double[symbols.Count];
            for (int i = 0; i < symbols.Count; i++)
            {
                assetDifference[i] = (double)Portfolio.TotalPortfolioValue * wOpt[i] - portfolioHoldingsValues[i];
            }

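            // Place orders from the most negative dollar difference (reductions)
            // to the most positive (additions).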
            var sortedIndices = assetDifference
                .Select((value, index) => new { Value = value, Index = index })
                .OrderBy(x => x.Value)
                .Select(x => x.Index)
                .ToArray();

            foreach (var symIdx in sortedIndices)
            {
                var sym = symbols[symIdx];
                double newNumStock = assetDifference[symIdx] / (double)Securities[sym].Close;
                // Round down (toward negative infinity) to a whole number of shares.
                int orderQuantity = (int)Math.Floor(newNumStock);

                if (orderQuantity == 0)
                    continue;

                var entryTicket = MarketOrder(sym, orderQuantity);

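                // Stop distance scaled by the forecast uncertainty (one-step GP
                // standard deviation) relative to price plus the forecast mean.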
                double stopLossPriceScale = stopLossScale * stds[0, symIdx] /
                    ((double)Securities[sym].Close + means[0, symIdx]);

                if (entryTicket.AverageFillPrice > 0)
                {
                    stopLossPrice[sym] = entryTicket.AverageFillPrice * (decimal)(1.0 - stopLossPriceScale);
                }
            }
        }
    }
}