Overall Statistics
Total Orders
7590
Average Win
0.10%
Average Loss
-0.10%
Compounding Annual Return
4.056%
Drawdown
12.000%
Expectancy
0.029
Start Equity
1000000
End Equity
1220077.79
Net Profit
22.008%
Sharpe Ratio
-0.111
Sortino Ratio
-0.138
Probabilistic Sharpe Ratio
5.090%
Loss Rate
49%
Win Rate
51%
Profit-Loss Ratio
1.03
Alpha
-0.01
Beta
0.03
Annual Standard Deviation
0.075
Annual Variance
0.006
Information Ratio
-0.486
Tracking Error
0.156
Treynor Ratio
-0.276
Total Fees
$28911.10
Estimated Strategy Capacity
$610000000.00
Lowest Capacity Asset
ASMLF R735QTJ8XC9X
Portfolio Turnover
30.12%
Drawdown Recovery
614
#region imports
from AlgorithmImports import *

from sklearn.naive_bayes import GaussianNB
from dateutil.relativedelta import relativedelta
#endregion


class GaussianNaiveBayesAlphaModel(AlphaModel):
    """
    Emits insights in the direction of the prediction made by a Gaussian
    Naive Bayes classifier trained per security.

    Features are rolling windows of intraday (open-to-close) returns for
    every ready security in the universe, concatenated column-wise; each
    security gets its own model fit against the signs of its open-to-open
    forward returns.
    """

    def __init__(self, algorithm, num_days_per_sample=4, num_samples=100):
        """
        Input:
         - algorithm
            Algorithm instance running the backtest
         - num_days_per_sample
            The number of open-close intraday returns for each sample
         - num_samples
            The number of samples used to train each model
        """
        self._algorithm = algorithm
        self._num_days_per_sample = num_days_per_sample
        self._num_samples = num_samples
        # Instance-level state. These were previously *class* attributes,
        # which are shared across every instance of the model and would leak
        # securities/retrain flags between instances.
        self._securities = []
        self._new_securities = False

    def update(self, algorithm, data):
        """
        Called each time the alpha model receives a new data slice.
        
        Input:
         - algorithm
            Algorithm instance running the backtest
         - data
            A data structure for all of an algorithm's data at a single time step
        
        Returns a list of Insights to the portfolio construction model.
        """
        # Retrain whenever the universe composition changed.
        if self._new_securities:
            self._train()
            self._new_securities = False
        
        tradable_securities = []
        features = [[]]
        
        for security in self._securities:
            if data.contains_key(security) and data[security] is not None and self._is_ready(security):
                tradable_securities.append(security)
                # The single feature row concatenates the latest sample of
                # every tradable security, matching the column layout the
                # models were fit with in _train.
                features[0].extend(security.features_by_day.iloc[-1].values)

        insights = []
        if len(tradable_securities) == 0:
            return []
            
        # Split half the portfolio equally across the tradable securities.
        weight = 0.5 / len(tradable_securities)
        for security in tradable_securities:
            direction = security.model.predict(features)
            if direction:
                insights.append(
                    Insight.price(security, data.time + timedelta(days=1, seconds=-1), direction, weight=weight)
                )

        return insights
        
    def on_securities_changed(self, algorithm, changes):
        """
        Called each time the universe has changed.
        
        Input:
         - algorithm
            Algorithm instance running the backtest
         - changes
            The additions and removals of the algorithm's security subscriptions
        """
        for security in changes.added_securities:
            security.model = None
            security.previous_open = 0
            
            # Setup consolidators
            security.consolidator = algorithm.consolidate(security, timedelta(1), self._custom_daily_handler)
            
            # Warm up training set
            security.roc_window = np.array([])
            security.labels_by_day = pd.Series(dtype=float)
            
            # One feature column per lagged intraday return for this security.
            data = {f'{security.symbol.id}_(t-{i})' : [] for i in range(1, self._num_days_per_sample + 1)}
            security.features_by_day = pd.DataFrame(data)
            
            lookback = self._num_days_per_sample + self._num_samples + 1
            history = algorithm.history(security, lookback, Resolution.DAILY)
            if history.empty or 'close' not in history:
                algorithm.log(f"Not enough history for {security.symbol} yet")    
                continue
            
            history = history.loc[security.symbol]
            history['open_close_return'] = (history.close - history.open) / history.open
            
            # Label for day t: sign of the open-to-open return realized over
            # the following two bars (open[t+1] -> open[t+2]).
            start = history.shift(-1).open
            end = history.shift(-2).open
            history['future_return'] = (end - start) / start
            
            for day, row in history.iterrows():
                security.previous_open = row.open
                if self._update_features(security, day, row.open_close_return) and not pd.isnull(row.future_return):
                    label = pd.Series([np.sign(row.future_return)], index=[day])
                    security.labels_by_day = pd.concat([security.labels_by_day, label]).iloc[-self._num_samples:]

            self._securities.append(security)
            
        for security in changes.removed_securities:
            algorithm.subscription_manager.remove_consolidator(security, security.consolidator)
            self._securities.remove(security)
        
        # Flag a retrain for the next update() call.
        self._new_securities = True

    def _custom_daily_handler(self, consolidated):
        """
        Updates the rolling lookback of training data.
        
        Inputs
         - consolidated
            Tradebar representing the latest completed trading day
        """
        time = consolidated.end_time
        security = self._algorithm.securities[consolidated.symbol]
        if time in security.features_by_day.index:
            return
        
        _open = consolidated.open
        close = consolidated.close
        
        open_close_return = (close - _open) / _open
        if self._update_features(security, time, open_close_return) and security.previous_open:
            # The open-to-open return just realized (previous bar's open to
            # this bar's open) is the label for the feature row recorded two
            # bars earlier, hence index[-3].
            day = security.features_by_day.index[-3]
            open_open_return = (_open - security.previous_open) / security.previous_open
            security.labels_by_day[day] = np.sign(open_open_return)
            security.labels_by_day = security.labels_by_day.iloc[-self._num_samples:]
            
        security.previous_open = _open

    def _update_features(self, security, day, open_close_return):
        """
        Updates the training data features.
        
        Inputs
         - security
            The security to update
         - day
            Timestamp of when we're aware of the open_close_return
         - open_close_return
            Open to close intraday return
            
        Returns T/F, showing if the features are in place to start updating the training labels.
        """
        # Prepend the newest return and truncate, so the window is ordered
        # newest-first and holds at most num_days_per_sample values.
        security.roc_window = np.append(open_close_return, security.roc_window)[:self._num_days_per_sample]
        if len(security.roc_window) < self._num_days_per_sample:
            return False
        security.features_by_day.loc[day] = security.roc_window
        # Keep num_samples labeled rows plus the 2 newest rows whose labels
        # aren't known yet.
        security.features_by_day = security.features_by_day.iloc[-(self._num_samples + 2):]
        return True
    
    def _train(self):
        """
        Trains the Gaussian Naive Bayes classifier model for each ready
        security.
        """
        features = pd.DataFrame()
        labels_by_security = {}
        
        # Every ready security contributes its feature columns to a single
        # matrix; each security keeps its own label series.
        for security in self._securities:
            if self._is_ready(security):
                features = pd.concat([features, security.features_by_day], axis=1)
                labels_by_security[security] = security.labels_by_day
        
        for security in self._securities:
            if self._is_ready(security):
                # Drop the two newest feature rows: their labels aren't known
                # yet, so the remaining rows align with labels_by_day.
                security.model = GaussianNB().fit(features.iloc[:-2], labels_by_security[security])

    def _is_ready(self, security):
        # Ready once the feature frame holds num_samples labeled rows plus
        # the two rows still awaiting labels.
        return security.features_by_day.shape[0] == self._num_samples + 2
        
#region imports
from AlgorithmImports import *

from universe import BigTechUniverseSelectionModel
from alpha import GaussianNaiveBayesAlphaModel
#endregion


class GaussianNaiveBayesClassificationAlgorithm(QCAlgorithm):
    """
    Trades a universe of large-cap technology securities on the predictions
    of per-security Gaussian Naive Bayes classifiers.
    """

    def initialize(self):
        """Configures the backtest window, cash, universe, and models."""
        # 5-year backtest ending at the project's end date.
        self.set_start_date(self.end_date - timedelta(5*365))
        self.set_cash(1_000_000)
        self.settings.daily_precise_end_time = False
        # Universe settings must be configured BEFORE the universe is added:
        # LEAN copies them when the universe subscription is created, so
        # setting the resolution afterwards may not take effect.
        self.universe_settings.resolution = Resolution.DAILY
        self.set_universe_selection(BigTechUniverseSelectionModel())
        
        self.set_alpha(GaussianNaiveBayesAlphaModel(self))
        
        self.set_portfolio_construction(InsightWeightingPortfolioConstructionModel())
        
        self.set_execution(ImmediateExecutionModel())
        
        self.set_brokerage_model(AlphaStreamsBrokerageModel())
        
#region imports
from AlgorithmImports import *
#endregion


class SymbolData:
    """
    This class stores data unique to each security in the universe.

    It maintains a rolling window of open-to-close intraday returns as
    features and the signs of open-to-open forward returns as labels, warmed
    up from history in the constructor and kept current by a daily
    consolidator.
    """

    def __init__(self, security, algorithm, num_days_per_sample=4, num_samples=100):
        """
        Input:
         - security
            Security object for the security
         - algorithm
            The algorithm instance running the backtest
         - num_days_per_sample
            The number of open-close intraday returns for each sample
         - num_samples
            The number of samples to train the model
        """
        # Trained classifier; stays None until a caller fits and assigns one.
        self.model = None
        self._symbol = security.symbol
        self._algorithm = algorithm
        self._num_days_per_sample = num_days_per_sample
        self._num_samples = num_samples 
        # Open price of the previously consolidated bar; 0 until the first
        # bar is seen (falsy, so the first handler call skips labeling).
        self._previous_open = 0
        
        # Setup consolidators
        self._consolidator = TradeBarConsolidator(timedelta(days=1))
        self._consolidator.data_consolidated += self._custom_daily_handler
        algorithm.subscription_manager.add_consolidator(self._symbol, self._consolidator)
        
        # Warm up training set
        self._roc_window = np.array([])
        self.labels_by_day = pd.Series()
        
        # One feature column per lagged intraday return:
        # <symbol-id>_(t-1) ... <symbol-id>_(t-num_days_per_sample).
        data = {f'{self._symbol.id}_(t-{i})' : [] for i in range(1, num_days_per_sample + 1)}
        self.features_by_day = pd.DataFrame(data)
        
        lookback = num_days_per_sample + num_samples + 1 
        history = algorithm.history(self._symbol, lookback, Resolution.DAILY)
        if history.empty or 'close' not in history:
            algorithm.log(f"Not enough history for {self._symbol} yet")    
            return
        
        history = history.loc[self._symbol]
        history['open_close_return'] = (history.close - history.open) / history.open
        
        # Label for day t: the open-to-open return realized over the next two
        # bars (open[t+1] -> open[t+2]); only its sign is stored below.
        start = history.shift(-1).open
        end = history.shift(-2).open
        history['future_return'] = (end - start) / start
        
        for day, row in history.iterrows():
            self._previous_open = row.open
            # Only start labeling once the feature window is full; the last
            # two rows have NaN future_return (shift(-1)/shift(-2)) and are
            # skipped here.
            if self._update_features(day, row.open_close_return) and not pd.isnull(row.future_return):
                row = pd.Series([np.sign(row.future_return)], index=[day])
                self.labels_by_day = pd.concat([self.labels_by_day, row]).iloc[-self._num_samples:]
    
    def _update_features(self, day, open_close_return) -> bool:
        """
        Updates the training data features.
        
        Inputs
         - day
            Timestamp of when we're aware of the open_close_return
         - open_close_return
            Open to close intraday return
            
        Returns T/F, showing if the features are in place to start updating the training labels.
        """
        # Prepend the newest return and truncate, keeping the window ordered
        # newest-first with at most num_days_per_sample values.
        self._roc_window = np.append(open_close_return, self._roc_window)[:self._num_days_per_sample]
        
        if len(self._roc_window) < self._num_days_per_sample: 
            return False
            
        self.features_by_day.loc[day] = self._roc_window
        # Keep num_samples labeled rows plus the 2 newest rows whose labels
        # aren't known yet.
        self.features_by_day = self.features_by_day[-(self._num_samples+2):]
        return True
        
        
    def _custom_daily_handler(self, sender, consolidated):
        """
        Updates the rolling lookback of training data.
        
        Inputs
         - sender
            Function calling the consolidator
         - consolidated
            Tradebar representing the latest completed trading day
        """
        time = consolidated.end_time
        # Skip duplicate bars for the same timestamp.
        if time in self.features_by_day.index:
            return
        
        _open = consolidated.open
        close = consolidated.close
        
        open_close_return = (close - _open) / _open
        if self._update_features(time, open_close_return) and self._previous_open:
            # The open-to-open return just realized (previous bar's open to
            # this bar's open) labels the feature row recorded two bars
            # earlier, hence index[-3].
            day = self.features_by_day.index[-3]
            open_open_return = (_open - self._previous_open) / self._previous_open
            self.labels_by_day[day] = np.sign(open_open_return)
            self.labels_by_day = self.labels_by_day[-self._num_samples:]
            
        self._previous_open = _open
        
    @property
    def is_ready(self) -> bool:
        # Ready once the feature frame holds num_samples labeled rows plus
        # the two rows still awaiting labels.
        return self.features_by_day.shape[0] == self._num_samples + 2
        
#region imports
from AlgorithmImports import *

from Selection.FundamentalUniverseSelectionModel import FundamentalUniverseSelectionModel
#endregion


class BigTechUniverseSelectionModel(FundamentalUniverseSelectionModel):
    """
    Selects the largest technology-sector securities by market cap,
    refreshing the selection once per calendar month.
    """
    
    def __init__(self, fine_size=10):
        """
        Input:
         - fine_size
            Maximum number of securities in the universe
        """
        self._fine_size = fine_size
        # Month of the last refresh; -1 forces a selection on the first call.
        self._month = -1
        super().__init__(True)

    def select_coarse(self, algorithm, coarse):
        """
        Coarse universe selection is called each day at midnight.
        
        Input:
         - algorithm
            Algorithm instance running the backtest
         - coarse
            List of CoarseFundamental objects
            
        Returns the symbols that have fundamental data.
        """
        # Only refresh the universe when the month rolls over.
        if self._month == algorithm.time.month:
            return Universe.UNCHANGED
        return [asset.symbol for asset in coarse if asset.has_fundamental_data]
        
    def select_fine(self, algorithm, fine):
        """
        Fine universe selection is performed each day at midnight after `SelectCoarse`.
        
        Input:
         - algorithm
            Algorithm instance running the backtest
         - fine
            List of FineFundamental objects that result from `SelectCoarse` processing
        
        Returns the symbols in the technology sector with the largest market caps.
        """
        self._month = algorithm.time.month
        in_tech = (asset for asset in fine
                   if asset.asset_classification.morningstar_sector_code == MorningstarSectorCode.TECHNOLOGY)
        largest_first = sorted(in_tech, key=lambda asset: asset.market_cap, reverse=True)
        return [asset.symbol for asset in largest_first[:self._fine_size]]