| Overall Statistics | |
| --- | --- |
| Total Orders | 295 |
| Average Win | 0.01% |
| Average Loss | -0.03% |
| Compounding Annual Return | -0.892% |
| Drawdown | 2.000% |
| Expectancy | -0.391 |
| Start Equity | 100000000 |
| End Equity | 98514543.84 |
| Net Profit | -1.485% |
| Sharpe Ratio | -5.993 |
| Sortino Ratio | -5.036 |
| Probabilistic Sharpe Ratio | 0.023% |
| Loss Rate | 57% |
| Win Rate | 43% |
| Profit-Loss Ratio | 0.42 |
| Alpha | -0.033 |
| Beta | 0.003 |
| Annual Standard Deviation | 0.005 |
| Annual Variance | 0 |
| Information Ratio | -0.503 |
| Tracking Error | 0.131 |
| Treynor Ratio | -10.237 |
| Total Fees | $6340.49 |
| Estimated Strategy Capacity | $690000.00 |
| Lowest Capacity Asset | PL X92QNYYIQDVL |
| Portfolio Turnover | 0.24% |
#region imports
from AlgorithmImports import *
#endregion
class ImprovedCommodityMomentumTrading(QCAlgorithm):
'''
Demystifying Time-Series Momentum Strategies: Volatility Estimators, Trading Rules and Pairwise Correlations
    The paper proposes three modifications to the basic time-series momentum strategy in order to reduce portfolio turnover and improve portfolio performance.
    1. Volatility Estimator: the Yang and Zhang (2000) range-based estimator replaces the traditional estimator (standard deviation of past daily returns)
    2. Trading Rules: trading positions take a continuum of values between -1 and +1 to reflect the statistical strength of the price trend, replacing the traditional trading rule (binary +1 or -1 based on the sign of the historical mean return)
    3. Pairwise Correlations: signed pairwise correlations are incorporated into the weighting scheme of the portfolio construction
Reference:
[1] Baltas, Nick and Kosowski, Robert, "Demystifying Time-Series Momentum Strategies: Volatility Estimators, Trading Rules and Pairwise Correlations", May 8, 2017.
URL: https://pdfs.semanticscholar.org/a2e9/df201d4b4774fda84a961cc804f2450988c5.pdf
[2] Yang, Dennis, and Qiang Zhang, "Drift‐Independent Volatility Estimation Based on High, Low, Open, and Close Prices", The Journal of Business, vol. 73, no. 3, 2000, pp. 477–492.
URL: www.jstor.org/stable/10.1086/209650.'''
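    # How the three modifications above map onto this implementation (a summary of the code below,
    # not text from the paper):
    #   1. get_y_z_volatility     -> Yang-Zhang range-based volatility estimator (Equation 20 in [1])
    #   2. get_trading_signal     -> continuous TREND signal from the t-statistic of daily log-returns
    #   3. get_correlation_factor -> correlation factor CF(rho_bar) from signed pairwise correlations
    # These are combined in on_data via the Baltas-Kosowski weight (Equation 19 in [1]):
    #   w_i = signal_i * sigma_target * CF(rho_bar) / (N * sigma_i)
    # where N is the number of tradable assets and sigma_i is the annualized YZ volatility of asset i.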
def initialize(self):
self.set_start_date(2018, 1, 1)
self.set_end_date(2019, 9, 1)
self.set_cash(100000000)
self.one_year = 365 # time period for trading rule calculation
self.one_month = 30 # time period for YZ volatility estimator
self.three_months = 90 # time period for pairwise correlation calculation
# Set portfolio target level of volatility, set to 12%
self.portfolio_target_sigma = 0.12
        # Next rebalance time tracker, used to rebalance the portfolio every month
self.rebalancing_time = datetime.min
tickers = [Futures.Grains.SOYBEANS,
Futures.Grains.WHEAT,
Futures.Grains.SOYBEAN_MEAL,
Futures.Grains.SOYBEAN_OIL,
Futures.Grains.CORN,
Futures.Grains.OATS,
Futures.Meats.LIVE_CATTLE,
Futures.Meats.FEEDER_CATTLE,
Futures.Meats.LEAN_HOGS,
Futures.Metals.GOLD,
Futures.Metals.SILVER,
Futures.Metals.PLATINUM,
Futures.Energies.BRENT_CRUDE,
Futures.Energies.HEATING_OIL,
Futures.Energies.NATURAL_GAS,
Futures.Energies.LOW_SULFUR_GASOIL,
Futures.Softs.COTTON_2,
Futures.Softs.ORANGE_JUICE,
Futures.Softs.COFFEE,
Futures.Softs.COCOA]
self.symbol_data = {}
for ticker in tickers:
future = self.add_future(ticker,
resolution = Resolution.DAILY,
extended_market_hours = True,
data_normalization_mode = DataNormalizationMode.BACKWARDS_RATIO,
data_mapping_mode = DataMappingMode.OPEN_INTEREST,
contract_depth_offset = 0
)
future.set_leverage(3)
self.symbol_data[future.symbol.id.to_string()] = SymbolData(future)
def on_data(self, data):
'''
        Rebalance at the beginning of each month.
        Portfolio weights for each constituent are calculated using the Baltas and Kosowski weighting scheme.
'''
# Rollover for future contract mapping change
for symbol_data in self.symbol_data.values():
if data.symbol_changed_events.contains_key(symbol_data.symbol):
changed_event = data.symbol_changed_events[symbol_data.symbol]
old_symbol = changed_event.old_symbol
new_symbol = changed_event.new_symbol
tag = f"Rollover - Symbol changed at {self.time}: {old_symbol} -> {new_symbol}"
if self.securities.contains_key(old_symbol):
quantity = self.portfolio[old_symbol].quantity
                    # Roll over: liquidate any position in the old mapped contract and re-establish it in the newly mapped contract
self.liquidate(old_symbol, tag = tag)
if self.securities.contains_key(new_symbol):
self.market_order(new_symbol, quantity // self.securities[new_symbol].symbol_properties.contract_multiplier, tag = tag)
        # Skip until the next scheduled monthly rebalance
if self.time < self.rebalancing_time:
return
        # --- Monthly rebalance execution ---
# dataframe that contains the historical data for all securities
history = self.history([x.symbol for x in self.symbol_data.values()], self.one_year, Resolution.DAILY)
history = history.droplevel([0]).replace(0, np.nan)
        # Get the security symbols that are in the history dataframe
available_symbols = list(set(history.index.get_level_values(level = 0)))
if len(available_symbols) == 0:
return
# Get the trade signals and YZ volatility for all securities
trade_signals = self.get_trading_signal(history)
volatility = self.get_y_z_volatility(history, available_symbols)
# Get the correlation factor
c_f_rho_bar = self.get_correlation_factor(history, trade_signals, available_symbols)
# Rebalance the portfolio according to Baltas and Kosowski suggested weights
n_assets = len(available_symbols)
        for symbol, vol in zip(available_symbols, volatility):
            # Look up the TREND signal by symbol so the arbitrary ordering of available_symbols cannot misalign signals and volatilities
            signal = trade_signals[symbol]
            # Baltas and Kosowski weights (Equation 19 in [1])
            weight = (signal * self.portfolio_target_sigma * c_f_rho_bar) / (n_assets * vol)
            if np.isnan(weight):
                continue
            mapped = self.symbol_data[symbol].mapped
            qty = self.calculate_order_quantity(mapped, np.clip(weight, -1, 1))
            multiplier = self.securities[mapped].symbol_properties.contract_multiplier
            order_qty = (qty - self.portfolio[mapped].quantity) // multiplier
            self.market_order(mapped, order_qty)
# Set next rebalance time
self.rebalancing_time = Expiry.end_of_month(self.time)
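    # Worked example of the weight calculation above (hypothetical numbers, for illustration only):
    # with signal_i = 0.79, sigma_target = 0.12, CF(rho_bar) = 2.04, N = 20 assets and an annualized
    # YZ volatility sigma_i = 0.207, the target weight is
    #   w_i = 0.79 * 0.12 * 2.04 / (20 * 0.207) ≈ 0.047
    # i.e. roughly 4.7% of portfolio value allocated to that contract before the [-1, +1] clip.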
def get_correlation_factor(self, history, trade_signals, available_symbols):
        '''Calculate the correlation factor, which is a function of the average signed pairwise correlation of all portfolio constituents
        - the calculation is based on the past three months of pairwise correlations
        - Notations:
            rho_bar - average signed pairwise correlation of all portfolio constituents
            c_f_rho_bar - the correlation factor as a function of rho_bar
        '''
        # Get the past three months of simple daily returns for all securities
settle = history.unstack(level = 0)['close']
settle = settle.groupby([x.date() for x in settle.index]).last()
past_three_month_returns = settle.pct_change().loc[settle.index[-1]-timedelta(self.three_months):]
# Get number of assets
n_assets = len(available_symbols)
        # Pairwise correlation matrix of daily returns for all assets (the trade-signal signs are applied in the summation below)
correlation_matrix = past_three_month_returns.corr()
# Calculate rho_bar
        summation = 0
        for i in range(n_assets - 1):
            for j in range(i + 1, n_assets):
                x_i = trade_signals.iloc[i]
                x_j = trade_signals.iloc[j]
                rho_i_j = correlation_matrix.iloc[i, j]
                summation += x_i * x_j * rho_i_j
# Equation 14 in [1]
rho_bar = (2 * summation) / (n_assets * (n_assets - 1))
# Calculate the correlation factor (c_f_rho_bar)
# Equation 18 in [1]
return np.sqrt(n_assets / (1 + (n_assets - 1) * rho_bar))
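    # Quick sanity check of the correlation factor (hypothetical numbers): with N = 20 assets and an
    # average signed pairwise correlation rho_bar = 0.2,
    #   CF = sqrt(20 / (1 + 19 * 0.2)) = sqrt(20 / 4.8) ≈ 2.04
    # As rho_bar -> 0 the factor approaches sqrt(N) (maximum diversification benefit), and as
    # rho_bar -> 1 it approaches 1 (no diversification benefit).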
def get_trading_signal(self, history):
        '''TREND Trading Signal
        - Uses the t-statistic of historical daily log-returns to reflect the strength of the price trend
        - TREND Signal Conditions:
            t-stat > 1          => TREND Signal = 1
            t-stat < -1         => TREND Signal = -1
            -1 <= t-stat <= 1   => TREND Signal = t-stat
        '''
settle = history.unstack(level = 0)['close']
settle = settle.groupby([x.date() for x in settle.index]).last()
# daily futures log-returns based on close-to-close
log_returns = np.log(settle/settle.shift(1)).dropna()
# Calculate the t-statistics as
# (mean-0)/(stdev/sqrt(n)), where n is sample size
mean = np.mean(log_returns)
std = np.std(log_returns)
n = len(log_returns)
t_stat = mean/(std/np.sqrt(n))
# cap holding at 1 and -1
return np.clip(t_stat, a_max=1, a_min=-1)
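    # Example of the TREND signal (hypothetical numbers): with n = 252 daily log-returns whose mean
    # is 0.0005 and standard deviation is 0.01,
    #   t-stat = 0.0005 / (0.01 / sqrt(252)) ≈ 0.79
    # which lies inside [-1, 1], so the position scales with the statistical strength of the trend
    # instead of taking the full +1/-1 position.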
def get_y_z_volatility(self, history, available_symbols):
        '''Yang and Zhang 'Drift-Independent Volatility Estimation'
        Formula: sigma__y_z^2 = sigma__o_j^2 + k * sigma__s_d^2 + (1 - k) * sigma__r_s^2 (Equation 20 in [1])
        where, sigma__o_j - overnight jump volatility estimator
               sigma__s_d - standard (close-to-close) volatility estimator
               sigma__r_s - Rogers and Satchell range volatility estimator
        '''
y_z_volatility = []
time_index = history.loc[available_symbols[0]].index
        # Calculate the YZ volatility for each security and append it to the list
for ticker in available_symbols:
past_month_ohlc = history.loc[ticker].loc[time_index[-1]-timedelta(self.one_month):time_index[-1]].dropna()
open, high, low, close = past_month_ohlc.open, past_month_ohlc.high, past_month_ohlc.low, past_month_ohlc.close
estimation_period = past_month_ohlc.shape[0]
if estimation_period <= 1:
y_z_volatility.append(np.nan)
continue
# Calculate constant parameter k for Yang and Zhang volatility estimator
# using the formula found in Yang and Zhang (2000)
k = 0.34 / (1.34 + (estimation_period + 1) / (estimation_period - 1))
            # sigma__o_j (overnight jump => stdev of previous-close-to-open log returns)
            close_to_open_log_returns = np.log(open/close.shift(1))
            close_to_open_log_returns = close_to_open_log_returns[np.isfinite(close_to_open_log_returns)]
            sigma__o_j = np.std(close_to_open_log_returns)
# sigma__s_d (standard deviation of close-to-close log returns)
close_to_close_log_returns = np.log(close/close.shift(1))
close_to_close_log_returns = close_to_close_log_returns[np.isfinite(close_to_close_log_returns)]
sigma__s_d = np.std(close_to_close_log_returns)
# sigma__r_s (Rogers and Satchell (1991))
h = np.log(high/open)
l = np.log(low/open)
c = np.log(close/open)
sigma__r_s_daily = (h * (h - c) + l * (l - c))**0.5
sigma__r_s_daily = sigma__r_s_daily[np.isfinite(sigma__r_s_daily)]
sigma__r_s = np.mean(sigma__r_s_daily)
# daily Yang and Zhang volatility
sigma__y_z = np.sqrt(sigma__o_j**2 + k * sigma__s_d**2 + (1 - k) * sigma__r_s**2)
# append annualized volatility to the list
y_z_volatility.append(sigma__y_z*np.sqrt(252))
return y_z_volatility
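    # Worked example of the YZ estimator (hypothetical numbers): with an estimation period of 21 days,
    #   k = 0.34 / (1.34 + 22/20) ≈ 0.139
    # and daily estimates sigma__o_j = 0.008, sigma__s_d = 0.012, sigma__r_s = 0.010:
    #   sigma__y_z = sqrt(0.008^2 + 0.139 * 0.012^2 + 0.861 * 0.010^2) ≈ 0.0130
    # which annualizes to roughly 0.0130 * sqrt(252) ≈ 0.21 (about 21% annualized volatility).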
class SymbolData:
def __init__(self, future):
self._future = future
self.id = future.symbol.id.to_string()
self.symbol = future.symbol
self.weight = 0
@property
def mapped(self):
return self._future.mapped