| Overall Statistics | |
| --- | --- |
| Total Orders | 0 |
| Average Win | 0% |
| Average Loss | 0% |
| Compounding Annual Return | 0% |
| Drawdown | 0% |
| Expectancy | 0 |
| Start Equity | 100000 |
| End Equity | 100000 |
| Net Profit | 0% |
| Sharpe Ratio | 0 |
| Sortino Ratio | 0 |
| Probabilistic Sharpe Ratio | 0% |
| Loss Rate | 0% |
| Win Rate | 0% |
| Profit-Loss Ratio | 0 |
| Alpha | 0 |
| Beta | 0 |
| Annual Standard Deviation | 0 |
| Annual Variance | 0 |
| Information Ratio | 0 |
| Tracking Error | 0 |
| Treynor Ratio | 0 |
| Total Fees | $0.00 |
| Estimated Strategy Capacity | $0 |
| Lowest Capacity Asset | |
| Portfolio Turnover | 0% |
| Drawdown Recovery | 0 |

All statistics are zero because the algorithm only ranks and logs symbols: it places no orders and quits after a single ranking pass.

from AlgorithmImports import *
from datetime import timedelta, datetime
import pandas as pd
import time
class IPODailyGainRankingAlgorithm(QCAlgorithm):
    def Initialize(self):
        # Set start date to yesterday (always valid for backtesting)
        yesterday = datetime.now() - timedelta(days=1)
        self.SetStartDate(yesterday.year, yesterday.month, yesterday.day)
        self.SetCash(100000)

        # Use daily resolution for coarse data and history requests
        self.UniverseSettings.Resolution = Resolution.Daily

        # ======================
        # USER-DEFINED FILTER VARIABLES
        # ======================
        self.top_symbols_count = 500
        self.min_price = 0.10
        self.min_shares_per_second = 1  # Minimum shares traded per second of the regular session

        # ======================
        # ALGORITHM STATE VARIABLES
        # ======================
        self.ranking_done = False
        self.filtered_symbols = []
        self.filtered_data = {}  # Symbol info keyed by Symbol: company name, exchange, coarse price/volume
        self.ranked_symbols = []
        self.process_start_time = None
        self.warning_counts = {"limited_history": 0, "extreme_returns": 0, "data_quality": 0}

        # Add the coarse universe
        self.AddUniverse(self.CoarseSelectionFunction)

        self.Log("Algorithm initialized. Waiting for coarse universe data...")
    def CoarseSelectionFunction(self, coarse):
        """
        Coarse universe filter. Called daily at midnight with data from the previous trading day.
        """
        if self.ranking_done:
            return Universe.Unchanged

        coarse_list = list(coarse)
        total_symbols = len(coarse_list)

        # Minimum daily volume: the per-second threshold scaled to a 6.5-hour (23,400-second) session
        min_volume = self.min_shares_per_second * 23400
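        # For example, the default of 1 share per second requires at least
        # 23,400 shares traded in the prior session for a symbol to pass the volume filter.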
        # Apply filters and store symbol info
        filtered_symbols = []
        for x in coarse_list:
            if x.Price is None or x.Volume is None:
                continue
            if x.Price >= self.min_price and x.Volume >= min_volume:
                # Get company name and exchange info
                company_name = "Unknown"
                exchange = str(x.Symbol.ID.Market)

                # Try to get company name from fundamental data
                if hasattr(x, 'CompanyName') and x.CompanyName:
                    company_name = x.CompanyName
                elif hasattr(x, 'Fundamental') and hasattr(x.Fundamental, 'CompanyName'):
                    company_name = x.Fundamental.CompanyName

                # Store symbol info
                self.filtered_data[x.Symbol] = {
                    'company_name': company_name[:30],  # Truncate for log readability
                    'exchange': exchange,
                    'coarse_price': x.Price,
                    'coarse_volume': x.Volume
                }
                filtered_symbols.append(x.Symbol)

        # Log filtering results
        excluded = total_symbols - len(filtered_symbols)
        self.Log("=" * 60)
        self.Log(f"FILTER RESULTS: Found: {total_symbols}, Excluded: {excluded}, Remaining: {len(filtered_symbols)}")
        self.Log("=" * 60)

        # Store filtered symbols and trigger ranking
        self.filtered_symbols = filtered_symbols
        self.ranking_done = True
        self.process_start_time = time.time()

        # Schedule the ranking shortly after today's market open
        self.Schedule.On(
            self.DateRules.Today,
            self.TimeRules.AfterMarketOpen("SPY", 1),
            self.ProcessRanking
        )

        return filtered_symbols
    def ProcessRanking(self):
        """
        Main ranking logic with verification data.
        """
        self.Log("Starting ranking process...")

        if not self.filtered_symbols:
            self.Log("No symbols to process. Exiting.")
            self.Quit()
            return

        # Request historical data
        history = self.History(
            self.filtered_symbols,
            timedelta(days=365*100),
            Resolution.Daily
        )
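        # The 100-year lookback is intentional: it requests all available daily history,
        # so the first bar returned for each symbol approximates its IPO / first trading day.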
        if history.empty:
            self.Log("No historical data returned.")
            self.Quit()
            return

        # Group by symbol and process
        grouped = history.groupby(level=0)
        ranking_data = []

        for symbol, symbol_data in grouped:
            try:
                # Get first and last bars
                first_bar = symbol_data.iloc[0]
                last_bar = symbol_data.iloc[-1]

                # Extract dates and prices
                first_date = symbol_data.index.get_level_values(1)[0]
                last_date = symbol_data.index.get_level_values(1)[-1]
                first_open = first_bar.open
                last_close = last_bar.close

                # Calculate metrics
                days_elapsed = (last_date - first_date).days
                if days_elapsed == 0:
                    continue
                percent_change = ((last_close - first_open) / first_open) * 100
                avg_daily_percent_gain = percent_change / days_elapsed
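                # Worked example: a symbol that opened at $10.00 on its first bar and
                # closed at $25.00 some 500 calendar days later has
                # percent_change = 150% and avg_daily_percent_gain = 0.30% per day.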
                # Get company info
                company_info = self.filtered_data.get(symbol, {'company_name': 'Unknown', 'exchange': 'Unknown'})

                # Generate warnings if warranted
                warnings = self.GenerateWarnings(symbol, days_elapsed, percent_change, symbol_data)

                # Store data for ranking
                ranking_data.append({
                    'symbol': symbol,
                    'company_name': company_info['company_name'],
                    'exchange': company_info['exchange'],
                    'avg_daily_pct': avg_daily_percent_gain,
                    'first_date': first_date,
                    'first_open': first_open,
                    'days_elapsed': days_elapsed,
                    'last_close': last_close,
                    'percent_change': percent_change,
                    'warnings': warnings
                })
            except Exception as e:
                self.Debug(f"Error processing {symbol}: {e}")
                continue

        # Rank symbols by average daily percent gain, best first
        ranking_data.sort(key=lambda x: x['avg_daily_pct'], reverse=True)
        self.ranked_symbols = ranking_data[:self.top_symbols_count]

        # Log results
        self.LogResults()

        # Stop algorithm
        self.Quit()
    def GenerateWarnings(self, symbol, days_elapsed, percent_change, symbol_data):
        """
        Generate warnings for symbols with potential data issues.
        Returns a comma-separated string of warnings, or an empty string if none.
        """
        warnings = []

        # Check for limited history
        if days_elapsed < 30:
            warnings.append("LimitedHistory(<30d)")
            self.warning_counts["limited_history"] += 1

        # Check for extreme returns (might indicate a data error or symbol change)
        if abs(percent_change) > 100000:  # > 100,000%
            warnings.append("ExtremeReturns")
            self.warning_counts["extreme_returns"] += 1

        # Check for data gaps (more than a 10-day gap in the middle of the history)
        if len(symbol_data) > 10:
            dates = symbol_data.index.get_level_values(1)
            max_gap = max((dates[i+1] - dates[i]).days for i in range(len(dates)-1))
            if max_gap > 10:
                warnings.append(f"DataGap({max_gap}d)")
                self.warning_counts["data_quality"] += 1

        return ", ".join(warnings) if warnings else ""
    def LogResults(self):
        """
        Log the ranking results in the specified format.
        """
        self.Log("\n" + "=" * 60)
        self.Log("IPO DATA")
        self.Log("=" * 60)

        for rank, data in enumerate(self.ranked_symbols, start=1):
            # Format company name for the log (remove special characters, truncate)
            company_display = ''.join(c for c in data['company_name'] if c.isalnum() or c in ' .-')
            if len(company_display) > 25:
                company_display = company_display[:22] + "..."

            # Build log line
            log_line = (
                f"{data['symbol'].Value}, "
                f"IPORnk: {rank}, "
                f"IPOAvgDay%: {data['avg_daily_pct']:.3f}%, "
                f"{data['first_date'].strftime('%m/%d/%Y')}, "
                f"IPO: ${data['first_open']:.2f}, "
                f"Days: {data['days_elapsed']:,}, "
                f"Price: ${data['last_close']:.2f}, "
                f"IPO%Chng: {data['percent_change']:,.0f}%, "
                f"Exch: {data['exchange']}, "
                f"Company: {company_display}"
            )

            # Add warnings if present
            if data['warnings']:
                log_line += f", Warnings: [{data['warnings']}]"

            self.Log(log_line)

        # Log summary with processing time
        self.LogSummary()
    def LogSummary(self):
        """
        Log a summary of the processing.
        """
        process_time = time.time() - self.process_start_time

        self.Log("\n" + "=" * 60)
        self.Log("PROCESSING SUMMARY")
        self.Log("=" * 60)
        self.Log(f"Total symbols processed: {len(self.ranked_symbols)}")
        self.Log(f"Processing time: {process_time:.1f} seconds ({process_time/60:.1f} minutes)")
        self.Log("Symbols with warnings:")
        self.Log(f"  - Limited history (<30 days): {self.warning_counts['limited_history']}")
        self.Log(f"  - Extreme returns (>100,000%): {self.warning_counts['extreme_returns']}")
        self.Log(f"  - Data gaps (>10 days): {self.warning_counts['data_quality']}")

        # Log earliest and latest symbols as examples
        if self.ranked_symbols:
            earliest = min(self.ranked_symbols, key=lambda x: x['first_date'])
            latest = max(self.ranked_symbols, key=lambda x: x['first_date'])
            self.Log(f"Earliest data point: {earliest['symbol'].Value} ({earliest['first_date'].strftime('%Y-%m-%d')})")
            self.Log(f"Latest data point: {latest['symbol'].Value} ({latest['first_date'].strftime('%Y-%m-%d')})")

        self.Log("\nNOTE: Always verify symbols against current trading data.")
        self.Log("Some symbols may be delisted, changed, or have data discrepancies.")
    def OnData(self, data):
        """Event handler stub; this algorithm does not trade on incoming data."""
        pass

    def OnSecuritiesChanged(self, changes):
        """Optional event handler; universe changes are not acted upon."""
        pass
## END OF CODE