Compare commits


3 Commits

Author | SHA1 | Message | Checks | Date
Billy Valentine | 6c48a2ad05 | Optimize AdvancedPositionSizer performance with object pooling and metrics tracking. Added performance tests. | Build and Test / build (push): cancelled | 2025-09-09 19:48:06 -04:00
Billy Valentine | 86422ff540 | Implement advanced position sizing algorithms with Optimal f, Kelly Criterion, and volatility-adjusted methods | Build and Test / build (push): cancelled | 2025-09-09 18:56:25 -04:00
Billy Valentine | 23bb431d42 | Implement market data handling and validation components | | 2025-09-09 18:36:30 -04:00
6 changed files with 1870 additions and 2 deletions

View File

@@ -163,7 +163,17 @@ namespace NT8.Core.Common.Models
/// <summary>
/// Optimal F calculation
/// </summary>
OptimalF
OptimalF,
/// <summary>
/// Kelly Criterion sizing
/// </summary>
KellyCriterion,
/// <summary>
/// Volatility-adjusted sizing
/// </summary>
VolatilityAdjusted
}
/// <summary>
@@ -438,4 +448,4 @@ namespace NT8.Core.Common.Models
ExecutionId = executionId;
}
}
}
}

View File

@@ -0,0 +1,275 @@
using System;
using System.Collections.Generic;
using NT8.Core.Common.Models;
namespace NT8.Core.MarketData
{
/// <summary>
/// Validates market data quality and detects anomalies
/// </summary>
public class DataQualityValidator
{
/// <summary>
/// Configuration for data quality validation
/// </summary>
public class DataQualityConfig
{
/// <summary>
/// Maximum allowed price change percentage between consecutive bars
/// </summary>
public double MaxPriceChangePercent { get; set; }
/// <summary>
/// Minimum allowed volume for a bar to be considered valid
/// </summary>
public long MinVolume { get; set; }
/// <summary>
/// Maximum allowed volume spike multiplier compared to average
/// </summary>
public double MaxVolumeSpikeMultiplier { get; set; }
/// <summary>
/// Constructor for DataQualityConfig
/// </summary>
public DataQualityConfig()
{
MaxPriceChangePercent = 10.0; // 10% default
MinVolume = 1; // Minimum 1 volume
MaxVolumeSpikeMultiplier = 100.0; // 100x volume spike threshold
}
}
private readonly DataQualityConfig _config;
private readonly Dictionary<string, List<double>> _priceHistory;
private readonly Dictionary<string, List<long>> _volumeHistory;
/// <summary>
/// Constructor for DataQualityValidator
/// </summary>
public DataQualityValidator(DataQualityConfig config = null)
{
_config = config ?? new DataQualityConfig();
_priceHistory = new Dictionary<string, List<double>>();
_volumeHistory = new Dictionary<string, List<long>>();
}
/// <summary>
/// Validate bar data quality and detect anomalies
/// </summary>
public DataQualityResult ValidateBar(BarData currentBar, BarData previousBar = null)
{
if (currentBar == null)
throw new ArgumentNullException("currentBar");
var result = new DataQualityResult();
// Basic validation
if (!BasicBarValidation(currentBar))
{
result.IsValid = false;
result.Reasons.Add("Basic validation failed");
return result;
}
// Previous bar validation
if (previousBar != null)
{
// Price change validation
var priceChangePercent = CalculatePriceChangePercent(previousBar.Close, currentBar.Open);
if (Math.Abs(priceChangePercent) > _config.MaxPriceChangePercent)
{
result.IsValid = false;
result.Reasons.Add(string.Format("Price change {0:F2}% exceeds threshold of {1:F2}%",
priceChangePercent, _config.MaxPriceChangePercent));
}
// Volume validation
if (currentBar.Volume < _config.MinVolume)
{
result.IsValid = false;
result.Reasons.Add(string.Format("Volume {0} below minimum threshold of {1}",
currentBar.Volume, _config.MinVolume));
}
// Volume spike detection
var avgVolume = CalculateAverageVolume(previousBar.Symbol);
if (avgVolume > 0 && currentBar.Volume > avgVolume * _config.MaxVolumeSpikeMultiplier)
{
result.IsValid = false;
result.Reasons.Add(string.Format("Volume spike detected: {0} vs average {1:F0}",
currentBar.Volume, avgVolume));
}
}
// Update history
UpdateHistory(currentBar);
result.IsValid = result.Reasons.Count == 0;
return result;
}
/// <summary>
/// Validate tick data quality and detect anomalies
/// </summary>
public DataQualityResult ValidateTick(TickData currentTick, TickData previousTick = null)
{
if (currentTick == null)
throw new ArgumentNullException("currentTick");
var result = new DataQualityResult();
// Basic validation
if (!BasicTickValidation(currentTick))
{
result.IsValid = false;
result.Reasons.Add("Basic validation failed");
return result;
}
// Previous tick validation
if (previousTick != null)
{
// Price change validation
var priceChangePercent = CalculatePriceChangePercent(previousTick.Price, currentTick.Price);
if (Math.Abs(priceChangePercent) > _config.MaxPriceChangePercent)
{
result.IsValid = false;
result.Reasons.Add(string.Format("Price change {0:F2}% exceeds threshold of {1:F2}%",
priceChangePercent, _config.MaxPriceChangePercent));
}
}
result.IsValid = result.Reasons.Count == 0;
return result;
}
/// <summary>
/// Basic bar validation
/// </summary>
private bool BasicBarValidation(BarData bar)
{
// Check for reasonable price values
if (bar.Open <= 0 || bar.High <= 0 || bar.Low <= 0 || bar.Close <= 0)
return false;
// Check for valid high/low relationships
if (bar.High < bar.Low)
return false;
// Check if close price is within high/low range
if (bar.Close < bar.Low || bar.Close > bar.High)
return false;
// Check for reasonable volume
if (bar.Volume < 0)
return false;
return true;
}
/// <summary>
/// Basic tick validation
/// </summary>
private bool BasicTickValidation(TickData tick)
{
// Check for reasonable price values
if (tick.Price <= 0)
return false;
// Check for reasonable size
if (tick.Size < 0)
return false;
return true;
}
/// <summary>
/// Calculate price change percentage
/// </summary>
private double CalculatePriceChangePercent(double previousPrice, double currentPrice)
{
if (previousPrice == 0)
return 0;
return ((currentPrice - previousPrice) / previousPrice) * 100;
}
/// <summary>
/// Calculate average volume for a symbol
/// </summary>
private double CalculateAverageVolume(string symbol)
{
List<long> volumes;
if (!_volumeHistory.TryGetValue(symbol, out volumes) || volumes.Count == 0)
return 0;
long sum = 0;
foreach (var volume in volumes)
{
sum += volume;
}
return (double)sum / volumes.Count;
}
/// <summary>
/// Update price and volume history
/// </summary>
private void UpdateHistory(BarData bar)
{
// Update price history
List<double> prices;
if (!_priceHistory.TryGetValue(bar.Symbol, out prices))
{
prices = new List<double>();
_priceHistory[bar.Symbol] = prices;
}
prices.Add(bar.Close);
if (prices.Count > 100) // Keep only last 100 prices
{
prices.RemoveAt(0);
}
// Update volume history
List<long> volumes;
if (!_volumeHistory.TryGetValue(bar.Symbol, out volumes))
{
volumes = new List<long>();
_volumeHistory[bar.Symbol] = volumes;
}
volumes.Add(bar.Volume);
if (volumes.Count > 100) // Keep only last 100 volumes
{
volumes.RemoveAt(0);
}
}
}
/// <summary>
/// Result of data quality validation
/// </summary>
public class DataQualityResult
{
/// <summary>
/// Whether the data is valid
/// </summary>
public bool IsValid { get; set; }
/// <summary>
/// Reasons for validation failure
/// </summary>
public List<string> Reasons { get; set; }
/// <summary>
/// Constructor for DataQualityResult
/// </summary>
public DataQualityResult()
{
IsValid = true;
Reasons = new List<string>();
}
}
}
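
The snippet below is an editor-added usage sketch, not part of the diff: it shows how the validator might be driven with a custom DataQualityConfig and two consecutive bars. The BarData constructor shape (symbol, time, open, high, low, close, volume, barSize) is inferred from the CSV parsing in HistoricalDataManager later in this change, and all prices and volumes are illustrative.

// Usage sketch (assumes using NT8.Core.MarketData and NT8.Core.Common.Models).
var config = new DataQualityValidator.DataQualityConfig
{
    MaxPriceChangePercent = 5.0,          // tighter gap threshold than the 10% default
    MinVolume = 10,
    MaxVolumeSpikeMultiplier = 50.0
};
var validator = new DataQualityValidator(config);
var previous = new BarData("ES", DateTime.UtcNow.AddMinutes(-1), 5000.00, 5002.00, 4999.50, 5001.25, 1200, TimeSpan.FromMinutes(1));
var current = new BarData("ES", DateTime.UtcNow, 5001.25, 5003.75, 5000.75, 5003.00, 1350, TimeSpan.FromMinutes(1));
var check = validator.ValidateBar(current, previous);
if (!check.IsValid)
{
    foreach (var reason in check.Reasons)
        Console.WriteLine("Bar rejected: " + reason);
}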

View File

@@ -0,0 +1,262 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading.Tasks;
using NT8.Core.Common.Models;
namespace NT8.Core.MarketData
{
/// <summary>
/// Manages historical market data storage, retrieval, and archiving
/// </summary>
public class HistoricalDataManager
{
private readonly string _dataDirectory;
private readonly bool _enableCompression;
/// <summary>
/// Constructor for HistoricalDataManager
/// </summary>
public HistoricalDataManager(string dataDirectory = null, bool enableCompression = true)
{
_dataDirectory = dataDirectory ?? Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "NT8", "MarketData");
_enableCompression = enableCompression;
// Ensure data directory exists
if (!Directory.Exists(_dataDirectory))
{
Directory.CreateDirectory(_dataDirectory);
}
}
/// <summary>
/// Save historical bars to storage
/// </summary>
public async Task SaveHistoricalBars(string symbol, TimeSpan barSize, List<BarData> bars)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
if (bars == null)
throw new ArgumentNullException("bars");
var fileName = GenerateFileName(symbol, barSize, DateTime.UtcNow);
var filePath = Path.Combine(_dataDirectory, fileName);
// Convert bars to CSV format
var csv = ConvertBarsToCsv(bars);
if (_enableCompression)
{
await SaveCompressedFile(filePath + ".gz", csv);
}
else
{
await SaveTextFile(filePath + ".csv", csv);
}
}
/// <summary>
/// Load historical bars from storage
/// </summary>
public async Task<List<BarData>> LoadHistoricalBars(string symbol, TimeSpan barSize, DateTime date)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
var fileName = GenerateFileName(symbol, barSize, date);
var filePath = Path.Combine(_dataDirectory, fileName);
string csv;
if (_enableCompression)
{
filePath += ".gz";
if (!File.Exists(filePath))
return new List<BarData>();
csv = await LoadCompressedFile(filePath);
}
else
{
filePath += ".csv";
if (!File.Exists(filePath))
return new List<BarData>();
csv = await LoadTextFile(filePath);
}
// Convert CSV back to BarData
return ConvertCsvToBars(csv);
}
/// <summary>
/// Archive old data files
/// </summary>
public async Task ArchiveOldData(DateTime olderThan)
{
var archiveDirectory = Path.Combine(_dataDirectory, "Archive");
if (!Directory.Exists(archiveDirectory))
{
Directory.CreateDirectory(archiveDirectory);
}
// Pick up both uncompressed (.csv) and compressed (.gz) data files
var files = new List<string>();
files.AddRange(Directory.GetFiles(_dataDirectory, "*.csv"));
files.AddRange(Directory.GetFiles(_dataDirectory, "*.gz"));
foreach (var file in files)
{
var fileName = Path.GetFileName(file);
var fileDate = ExtractDateFromFileName(fileName);
if (fileDate < olderThan)
{
var archivePath = Path.Combine(archiveDirectory, fileName);
await Task.Run(() => File.Move(file, archivePath));
}
}
}
/// <summary>
/// Generate file name for data storage
/// </summary>
private string GenerateFileName(string symbol, TimeSpan barSize, DateTime date)
{
return string.Format("{0}_{1}_{2:yyyyMMdd}", symbol, (int)barSize.TotalMinutes, date);
}
/// <summary>
/// Extract date from file name
/// </summary>
private DateTime ExtractDateFromFileName(string fileName)
{
// Extract date portion from file name
var parts = fileName.Split('_');
if (parts.Length >= 3)
{
var datePart = parts[2].Split('.')[0]; // Remove extension
DateTime date;
if (DateTime.TryParseExact(datePart, "yyyyMMdd", null, System.Globalization.DateTimeStyles.None, out date))
{
return date;
}
}
return DateTime.MinValue;
}
/// <summary>
/// Convert bars to CSV format
/// </summary>
private string ConvertBarsToCsv(List<BarData> bars)
{
var sb = new StringBuilder();
// Header
sb.AppendLine("Symbol,Time,Open,High,Low,Close,Volume,BarSizeTicks");
// Data rows
foreach (var bar in bars)
{
sb.AppendFormat("{0},{1:yyyy-MM-dd HH:mm:ss},{2},{3},{4},{5},{6},{7}",
bar.Symbol,
bar.Time,
bar.Open,
bar.High,
bar.Low,
bar.Close,
bar.Volume,
bar.BarSize.Ticks);
sb.AppendLine();
}
return sb.ToString();
}
/// <summary>
/// Convert CSV to bars
/// </summary>
private List<BarData> ConvertCsvToBars(string csv)
{
var bars = new List<BarData>();
var lines = csv.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
// Skip header line
for (int i = 1; i < lines.Length; i++)
{
var fields = lines[i].Split(',');
if (fields.Length >= 8)
{
try
{
var bar = new BarData(
fields[0], // Symbol
DateTime.Parse(fields[1]), // Time
double.Parse(fields[2]), // Open
double.Parse(fields[3]), // High
double.Parse(fields[4]), // Low
double.Parse(fields[5]), // Close
long.Parse(fields[6]), // Volume
TimeSpan.FromTicks(long.Parse(fields[7])) // BarSize
);
bars.Add(bar);
}
catch
{
// Skip invalid rows
}
}
}
return bars;
}
/// <summary>
/// Save compressed file
/// </summary>
private async Task SaveCompressedFile(string filePath, string content)
{
using (var fileStream = new FileStream(filePath, FileMode.Create))
using (var gzipStream = new GZipStream(fileStream, CompressionMode.Compress))
using (var writer = new StreamWriter(gzipStream))
{
await writer.WriteAsync(content);
}
}
/// <summary>
/// Load compressed file
/// </summary>
private async Task<string> LoadCompressedFile(string filePath)
{
using (var fileStream = new FileStream(filePath, FileMode.Open))
using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress))
using (var reader = new StreamReader(gzipStream))
{
return await reader.ReadToEndAsync();
}
}
/// <summary>
/// Save text file
/// </summary>
private async Task SaveTextFile(string filePath, string content)
{
using (var writer = new StreamWriter(filePath))
{
await writer.WriteAsync(content);
}
}
/// <summary>
/// Load text file
/// </summary>
private async Task<string> LoadTextFile(string filePath)
{
using (var reader = new StreamReader(filePath))
{
return await reader.ReadToEndAsync();
}
}
}
}
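
As another editor-added sketch (not part of the diff), here is how the manager above might round-trip a day of one-minute bars. The values are illustrative, compression is left at its gzip default, and the calls are assumed to run inside an async method. Note that SaveHistoricalBars names the file with DateTime.UtcNow, so a matching load has to target the same UTC date.

// Usage sketch (inside an async method).
var manager = new HistoricalDataManager();   // defaults to the ApplicationData\NT8\MarketData folder with compression
var bars = new List<BarData>
{
    new BarData("ES", new DateTime(2025, 9, 9, 13, 30, 0, DateTimeKind.Utc), 5000.00, 5004.00, 4998.50, 5003.25, 1500, TimeSpan.FromMinutes(1))
};
await manager.SaveHistoricalBars("ES", TimeSpan.FromMinutes(1), bars);
var loaded = await manager.LoadHistoricalBars("ES", TimeSpan.FromMinutes(1), DateTime.UtcNow);
Console.WriteLine("Loaded " + loaded.Count + " bars");
await manager.ArchiveOldData(DateTime.UtcNow.AddDays(-30));   // move files older than 30 days into Archive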

View File

@@ -0,0 +1,314 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using NT8.Core.Common.Models;
namespace NT8.Core.MarketData
{
/// <summary>
/// Base implementation of market data provider with caching and validation
/// </summary>
public class MarketDataProvider : IMarketDataProvider
{
// Data storage
private readonly ConcurrentDictionary<string, List<BarData>> _barCache;
private readonly ConcurrentDictionary<string, List<TickData>> _tickCache;
private readonly ConcurrentDictionary<string, double> _currentPrices;
// Subscriptions
private readonly ConcurrentDictionary<string, List<Action<BarData>>> _barSubscriptions;
private readonly ConcurrentDictionary<string, List<Action<TickData>>> _tickSubscriptions;
// Configuration
private readonly int _maxCacheSize;
private readonly TimeSpan _dataFreshnessTimeout;
/// <summary>
/// Constructor for MarketDataProvider
/// </summary>
public MarketDataProvider(int maxCacheSize = 10000, TimeSpan? dataFreshnessTimeout = null)
{
_barCache = new ConcurrentDictionary<string, List<BarData>>();
_tickCache = new ConcurrentDictionary<string, List<TickData>>();
_currentPrices = new ConcurrentDictionary<string, double>();
_barSubscriptions = new ConcurrentDictionary<string, List<Action<BarData>>>();
_tickSubscriptions = new ConcurrentDictionary<string, List<Action<TickData>>>();
_maxCacheSize = maxCacheSize;
_dataFreshnessTimeout = dataFreshnessTimeout ?? TimeSpan.FromMinutes(5);
}
/// <summary>
/// Subscribe to bar data
/// </summary>
public void SubscribeBars(string symbol, TimeSpan barSize, Action<BarData> onBar)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
if (onBar == null)
throw new ArgumentNullException("onBar");
string key = string.Format("{0}_{1}", symbol, barSize);
_barSubscriptions.AddOrUpdate(
key,
new List<Action<BarData>> { onBar },
(k, list) => { list.Add(onBar); return list; }
);
}
/// <summary>
/// Subscribe to tick data
/// </summary>
public void SubscribeTicks(string symbol, Action<TickData> onTick)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
if (onTick == null)
throw new ArgumentNullException("onTick");
_tickSubscriptions.AddOrUpdate(
symbol,
new List<Action<TickData>> { onTick },
(k, list) => { list.Add(onTick); return list; }
);
}
/// <summary>
/// Get historical bars
/// </summary>
public async Task<List<BarData>> GetHistoricalBars(string symbol, TimeSpan barSize, int count)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
if (count <= 0)
throw new ArgumentException("Count must be greater than zero", "count");
string key = string.Format("{0}_{1}", symbol, barSize);
List<BarData> bars;
if (_barCache.TryGetValue(key, out bars))
{
// Return the most recent bars, up to the requested count
var result = bars
.OrderByDescending(b => b.Time)
.Take(count)
.ToList();
return await Task.FromResult(result);
}
// Return empty list if no data is available
return await Task.FromResult(new List<BarData>());
}
/// <summary>
/// Get current market price
/// </summary>
public double? GetCurrentPrice(string symbol)
{
if (string.IsNullOrEmpty(symbol))
throw new ArgumentException("Symbol cannot be null or empty", "symbol");
double price;
if (_currentPrices.TryGetValue(symbol, out price))
{
return price;
}
return null;
}
/// <summary>
/// Add bar data to the provider
/// </summary>
public void AddBarData(BarData bar)
{
if (bar == null)
throw new ArgumentNullException("bar");
string key = string.Format("{0}_{1}", bar.Symbol, bar.BarSize);
// Add to cache
_barCache.AddOrUpdate(
key,
new List<BarData> { bar },
(k, list) =>
{
list.Add(bar);
// Trim cache if it exceeds maximum size
if (list.Count > _maxCacheSize)
{
list.RemoveRange(0, list.Count - _maxCacheSize);
}
return list;
}
);
// Update current price
_currentPrices[bar.Symbol] = bar.Close;
// Notify subscribers
List<Action<BarData>> subscribers;
if (_barSubscriptions.TryGetValue(key, out subscribers))
{
foreach (var subscriber in subscribers)
{
try
{
subscriber(bar);
}
catch
{
// Ignore exceptions in subscriber callbacks
}
}
}
}
/// <summary>
/// Add tick data to the provider
/// </summary>
public void AddTickData(TickData tick)
{
if (tick == null)
throw new ArgumentNullException("tick");
// Add to cache
_tickCache.AddOrUpdate(
tick.Symbol,
new List<TickData> { tick },
(k, list) =>
{
list.Add(tick);
// Trim cache if it exceeds maximum size
if (list.Count > _maxCacheSize)
{
list.RemoveRange(0, list.Count - _maxCacheSize);
}
return list;
}
);
// Update current price
_currentPrices[tick.Symbol] = tick.Price;
// Notify subscribers
List<Action<TickData>> subscribers;
if (_tickSubscriptions.TryGetValue(tick.Symbol, out subscribers))
{
foreach (var subscriber in subscribers)
{
try
{
subscriber(tick);
}
catch
{
// Ignore exceptions in subscriber callbacks
}
}
}
}
/// <summary>
/// Validate market data quality
/// </summary>
public bool ValidateDataQuality(BarData bar)
{
if (bar == null)
return false;
// Check for reasonable price values
if (bar.Open <= 0 || bar.High <= 0 || bar.Low <= 0 || bar.Close <= 0)
return false;
// Check for valid high/low relationships
if (bar.High < bar.Low)
return false;
// Check if close price is within high/low range
if (bar.Close < bar.Low || bar.Close > bar.High)
return false;
// Check for reasonable volume
if (bar.Volume < 0)
return false;
return true;
}
/// <summary>
/// Validate market data quality
/// </summary>
public bool ValidateDataQuality(TickData tick)
{
if (tick == null)
return false;
// Check for reasonable price values
if (tick.Price <= 0)
return false;
// Check for reasonable size
if (tick.Size < 0)
return false;
return true;
}
/// <summary>
/// Get data freshness information
/// </summary>
public DateTime? GetLastUpdateTime(string symbol)
{
if (string.IsNullOrEmpty(symbol))
return null;
// Check bars for this symbol
var barKeys = _barCache.Keys.Where(k => k.StartsWith(string.Format("{0}_", symbol))).ToList();
DateTime? latestBarTime = null;
foreach (var key in barKeys)
{
List<BarData> bars;
if (_barCache.TryGetValue(key, out bars) && bars.Count > 0)
{
var lastBarTime = bars.Max(b => b.Time);
if (latestBarTime == null || lastBarTime > latestBarTime)
{
latestBarTime = lastBarTime;
}
}
}
// Check ticks for this symbol
List<TickData> ticks;
if (_tickCache.TryGetValue(symbol, out ticks) && ticks.Count > 0)
{
var lastTickTime = ticks.Max(t => t.Time);
if (latestBarTime == null || lastTickTime > latestBarTime)
{
latestBarTime = lastTickTime;
}
}
return latestBarTime;
}
/// <summary>
/// Check if data is fresh
/// </summary>
public bool IsDataFresh(string symbol)
{
DateTime? lastUpdate = GetLastUpdateTime(symbol);
if (lastUpdate == null)
return false;
return DateTime.UtcNow - lastUpdate.Value < _dataFreshnessTimeout;
}
}
}
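
The sketch below, added for orientation and not part of the diff, wires the provider together: a bar subscription, a pushed bar that passes the quality checks, and a few queries. Bar values are illustrative and the awaited call is assumed to run inside an async method.

// Usage sketch (inside an async method).
var provider = new MarketDataProvider(maxCacheSize: 5000);
provider.SubscribeBars("ES", TimeSpan.FromMinutes(1), bar =>
    Console.WriteLine("New bar " + bar.Symbol + " close=" + bar.Close));
var bar1 = new BarData("ES", DateTime.UtcNow, 5000.00, 5001.50, 4999.75, 5001.00, 900, TimeSpan.FromMinutes(1));
if (provider.ValidateDataQuality(bar1))
    provider.AddBarData(bar1);                       // fires the subscription and updates the current price
double? last = provider.GetCurrentPrice("ES");       // 5001.00 after the bar above
var recent = await provider.GetHistoricalBars("ES", TimeSpan.FromMinutes(1), 100);
Console.WriteLine(recent.Count + " cached bars, last price " + last + ", fresh: " + provider.IsDataFresh("ES"));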

View File

@@ -0,0 +1,708 @@
using NT8.Core.Common.Models;
using NT8.Core.Logging;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
namespace NT8.Core.Sizing
{
/// <summary>
/// Advanced position sizer with Optimal f, Kelly Criterion, and volatility-adjusted methods
/// Implements sophisticated position sizing algorithms for professional trading
/// </summary>
public class AdvancedPositionSizer : IPositionSizer
{
private readonly ILogger _logger;
// Performance metrics
private readonly SizingMetrics _metrics = new SizingMetrics();
// Object pools for frequently used objects
private readonly ConcurrentQueue<Dictionary<string, object>> _dictionaryPool = new ConcurrentQueue<Dictionary<string, object>>();
private readonly ConcurrentQueue<List<TradeResult>> _tradeListPool = new ConcurrentQueue<List<TradeResult>>();
// Pool sizes
private const int MaxPoolSize = 100;
public AdvancedPositionSizer(ILogger logger)
{
if (logger == null) throw new ArgumentNullException("logger");
_logger = logger;
}
public SizingResult CalculateSize(StrategyIntent intent, StrategyContext context, SizingConfig config)
{
if (intent == null) throw new ArgumentNullException("intent");
if (context == null) throw new ArgumentNullException("context");
if (config == null) throw new ArgumentNullException("config");
var startTime = DateTime.UtcNow;
// Validate intent is suitable for sizing
if (!intent.IsValid())
{
_logger.LogWarning("Invalid strategy intent provided for sizing: {0}", intent);
Dictionary<string, object> errorCalcs;
if (!_dictionaryPool.TryDequeue(out errorCalcs))
{
errorCalcs = new Dictionary<string, object>();
}
errorCalcs.Clear();
errorCalcs.Add("error", "Invalid intent");
var result = new SizingResult(0, 0, config.Method, errorCalcs);
// Record metrics
var endTime = DateTime.UtcNow;
var processingTime = (endTime - startTime).TotalMilliseconds;
_metrics.RecordOperation(config.Method, (long)processingTime);
return result;
}
SizingResult sizingResult;
switch (config.Method)
{
case SizingMethod.OptimalF:
sizingResult = CalculateOptimalF(intent, context, config);
break;
case SizingMethod.KellyCriterion:
sizingResult = CalculateKellyCriterion(intent, context, config);
break;
case SizingMethod.VolatilityAdjusted:
sizingResult = CalculateVolatilityAdjustedSizing(intent, context, config);
break;
default:
throw new NotSupportedException(String.Format("Sizing method {0} not supported in AdvancedPositionSizer", config.Method));
}
// Record metrics
var endTime2 = DateTime.UtcNow;
var processingTime2 = (endTime2 - startTime).TotalMilliseconds;
_metrics.RecordOperation(config.Method, (long)processingTime2);
return sizingResult;
}
private SizingResult CalculateOptimalF(StrategyIntent intent, StrategyContext context, SizingConfig config)
{
// Get trade history for calculating Optimal f
List<TradeResult> tradeHistory;
if (!_tradeListPool.TryDequeue(out tradeHistory))
{
tradeHistory = new List<TradeResult>();
}
tradeHistory.Clear();
tradeHistory.AddRange(GetRecentTradeHistory(context, config));
if (tradeHistory.Count == 0)
{
// Return trade history to pool
if (_tradeListPool.Count < MaxPoolSize)
{
_tradeListPool.Enqueue(tradeHistory);
}
// Fall back to fixed risk if no trade history
return CalculateFixedRiskFallback(intent, context, config);
}
// Calculate Optimal f
var optimalF = CalculateOptimalFValue(tradeHistory);
// Get account information
var equity = context.Account.Equity;
var maxLoss = GetMaximumLossFromHistory(tradeHistory);
// Calculate optimal contracts using Optimal f formula
// Contracts = (Optimal f * Equity) / Max Loss
var optimalContracts = (optimalF * equity) / Math.Abs(maxLoss);
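// Editor note, illustrative numbers only: f = 0.05, equity = $50,000 and |max loss| = $500
// give (0.05 * 50000) / 500 = 5 contracts before rounding and clamping.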
// Round down to whole contracts (conservative approach)
var contracts = (int)Math.Floor(optimalContracts);
// Apply min/max clamping
contracts = Math.Max(config.MinContracts, Math.Min(config.MaxContracts, contracts));
// Calculate actual risk with final contract count
var tickValue = GetTickValue(intent.Symbol);
var riskPerContract = intent.StopTicks * tickValue;
var actualRisk = contracts * riskPerContract;
_logger.LogDebug("Optimal f sizing: {0} f={1:F4} ${2:F2}→{3:F2}→{4} contracts, ${5:F2} actual risk",
intent.Symbol, optimalF, equity, optimalContracts, contracts, actualRisk);
Dictionary<string, object> calculations;
if (!_dictionaryPool.TryDequeue(out calculations))
{
calculations = new Dictionary<string, object>();
}
calculations.Clear();
calculations.Add("optimal_f", optimalF);
calculations.Add("equity", equity);
calculations.Add("max_loss", maxLoss);
calculations.Add("optimal_contracts", optimalContracts);
calculations.Add("clamped_contracts", contracts);
calculations.Add("stop_ticks", intent.StopTicks);
calculations.Add("tick_value", tickValue);
calculations.Add("risk_per_contract", riskPerContract);
calculations.Add("actual_risk", actualRisk);
calculations.Add("min_contracts", config.MinContracts);
calculations.Add("max_contracts", config.MaxContracts);
// Return trade history to pool
if (_tradeListPool.Count < MaxPoolSize)
{
_tradeListPool.Enqueue(tradeHistory);
}
var result = new SizingResult(
contracts: contracts,
riskAmount: actualRisk,
method: SizingMethod.OptimalF,
calculations: calculations
);
return result;
}
private SizingResult CalculateKellyCriterion(StrategyIntent intent, StrategyContext context, SizingConfig config)
{
// Get trade history for calculating win rate and average win/loss
List<TradeResult> tradeHistory;
if (!_tradeListPool.TryDequeue(out tradeHistory))
{
tradeHistory = new List<TradeResult>();
}
tradeHistory.Clear();
tradeHistory.AddRange(GetRecentTradeHistory(context, config));
if (tradeHistory.Count == 0)
{
// Return trade history to pool
if (_tradeListPool.Count < MaxPoolSize)
{
_tradeListPool.Enqueue(tradeHistory);
}
// Fall back to fixed risk if no trade history
return CalculateFixedRiskFallback(intent, context, config);
}
// Calculate Kelly Criterion parameters
var winRate = CalculateWinRate(tradeHistory);
var avgWin = CalculateAverageWin(tradeHistory);
var avgLoss = CalculateAverageLoss(tradeHistory);
// Calculate Kelly Criterion fraction
// K = (bp - q) / b
// Where: b = avgWin/avgLoss (odds), p = winRate, q = 1 - winRate
var odds = avgWin / Math.Abs(avgLoss);
var kellyFraction = ((odds * winRate) - (1 - winRate)) / odds;
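// Editor note, illustrative numbers only: winRate = 0.55, avgWin = $300 and avgLoss = -$200
// give odds = 1.5 and K = (1.5 * 0.55 - 0.45) / 1.5 = 0.25 before the fractional-Kelly scaling below.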
// Apply fractional Kelly to reduce risk (typically use 25%-50% of full Kelly)
var fractionalKelly = GetParameterValue<double>(config, "kelly_fraction", 0.5);
var adjustedKelly = kellyFraction * fractionalKelly;
// Calculate position size based on Kelly Criterion
var equity = context.Account.Equity;
var tickValue = GetTickValue(intent.Symbol);
var riskPerContract = intent.StopTicks * tickValue;
// Kelly position size = (Kelly Fraction * Equity) / Risk per contract
var kellyContracts = (adjustedKelly * equity) / riskPerContract;
var contracts = (int)Math.Floor(Math.Abs(kellyContracts));
// Apply min/max clamping
contracts = Math.Max(config.MinContracts, Math.Min(config.MaxContracts, contracts));
// Calculate actual risk with final contract count
var actualRisk = contracts * riskPerContract;
_logger.LogDebug("Kelly Criterion sizing: {0} K={1:F4} adj={2:F4} ${3:F2}→{4:F2}→{5} contracts, ${6:F2} actual risk",
intent.Symbol, kellyFraction, adjustedKelly, equity, kellyContracts, contracts, actualRisk);
Dictionary<string, object> calculations;
if (!_dictionaryPool.TryDequeue(out calculations))
{
calculations = new Dictionary<string, object>();
}
calculations.Clear();
calculations.Add("win_rate", winRate);
calculations.Add("avg_win", avgWin);
calculations.Add("avg_loss", avgLoss);
calculations.Add("odds", odds);
calculations.Add("kelly_fraction", kellyFraction);
calculations.Add("fractional_kelly", fractionalKelly);
calculations.Add("adjusted_kelly", adjustedKelly);
calculations.Add("equity", equity);
calculations.Add("tick_value", tickValue);
calculations.Add("risk_per_contract", riskPerContract);
calculations.Add("kelly_contracts", kellyContracts);
calculations.Add("clamped_contracts", contracts);
calculations.Add("actual_risk", actualRisk);
calculations.Add("min_contracts", config.MinContracts);
calculations.Add("max_contracts", config.MaxContracts);
// Return trade history to pool
if (_tradeListPool.Count < MaxPoolSize)
{
_tradeListPool.Enqueue(tradeHistory);
}
var result = new SizingResult(
contracts: contracts,
riskAmount: actualRisk,
method: SizingMethod.KellyCriterion,
calculations: calculations
);
return result;
}
private SizingResult CalculateVolatilityAdjustedSizing(StrategyIntent intent, StrategyContext context, SizingConfig config)
{
// Get volatility information
var atr = CalculateATR(context, intent.Symbol, 14); // 14-period ATR
var tickValue = GetTickValue(intent.Symbol);
// Get base risk from configuration
var baseRisk = config.RiskPerTrade;
// Apply volatility adjustment
// Higher volatility = lower position size, Lower volatility = higher position size
var volatilityAdjustment = CalculateVolatilityAdjustment(atr, intent.Symbol);
var adjustedRisk = baseRisk * volatilityAdjustment;
// Calculate contracts based on adjusted risk
var riskPerContract = intent.StopTicks * tickValue;
var optimalContracts = adjustedRisk / riskPerContract;
var contracts = (int)Math.Floor(optimalContracts);
// Apply min/max clamping
contracts = Math.Max(config.MinContracts, Math.Min(config.MaxContracts, contracts));
// Calculate actual risk with final contract count
var actualRisk = contracts * riskPerContract;
_logger.LogDebug("Volatility-adjusted sizing: {0} ATR={1:F4} adj={2:F4} ${3:F2}→${4:F2}→{5} contracts, ${6:F2} actual risk",
intent.Symbol, atr, volatilityAdjustment, baseRisk, adjustedRisk, contracts, actualRisk);
Dictionary<string, object> calculations;
if (!_dictionaryPool.TryDequeue(out calculations))
{
calculations = new Dictionary<string, object>();
}
calculations.Clear();
calculations.Add("atr", atr);
calculations.Add("volatility_adjustment", volatilityAdjustment);
calculations.Add("base_risk", baseRisk);
calculations.Add("adjusted_risk", adjustedRisk);
calculations.Add("tick_value", tickValue);
calculations.Add("risk_per_contract", riskPerContract);
calculations.Add("optimal_contracts", optimalContracts);
calculations.Add("clamped_contracts", contracts);
calculations.Add("actual_risk", actualRisk);
calculations.Add("stop_ticks", intent.StopTicks);
calculations.Add("min_contracts", config.MinContracts);
calculations.Add("max_contracts", config.MaxContracts);
var result = new SizingResult(
contracts: contracts,
riskAmount: actualRisk,
method: SizingMethod.VolatilityAdjusted,
calculations: calculations
);
return result;
}
private SizingResult CalculateFixedRiskFallback(StrategyIntent intent, StrategyContext context, SizingConfig config)
{
var tickValue = GetTickValue(intent.Symbol);
// Validate stop ticks
if (intent.StopTicks <= 0)
{
_logger.LogWarning("Invalid stop ticks {0} for fixed risk sizing on {1}",
intent.StopTicks, intent.Symbol);
Dictionary<string, object> errorCalcs;
if (!_dictionaryPool.TryDequeue(out errorCalcs))
{
errorCalcs = new Dictionary<string, object>();
}
errorCalcs.Clear();
errorCalcs.Add("error", "Invalid stop ticks");
errorCalcs.Add("stop_ticks", intent.StopTicks);
var errorResult = new SizingResult(0, 0, SizingMethod.FixedDollarRisk, errorCalcs);
return errorResult;
}
// Calculate optimal contracts for target risk
var targetRisk = config.RiskPerTrade;
var riskPerContract = intent.StopTicks * tickValue;
var optimalContracts = targetRisk / riskPerContract;
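// Editor note, illustrative numbers only: a $500 target risk with an 8-tick stop on ES ($12.50/tick)
// gives riskPerContract = $100 and optimalContracts = 5.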
// Round down to whole contracts (conservative approach)
var contracts = (int)Math.Floor(optimalContracts);
// Apply min/max clamping
contracts = Math.Max(config.MinContracts, Math.Min(config.MaxContracts, contracts));
// Calculate actual risk with final contract count
var actualRisk = contracts * riskPerContract;
_logger.LogDebug("Fixed risk fallback sizing: {0} ${1:F2}→{2:F2}→{3} contracts, ${4:F2} actual risk",
intent.Symbol, targetRisk, optimalContracts, contracts, actualRisk);
Dictionary<string, object> calculations;
if (!_dictionaryPool.TryDequeue(out calculations))
{
calculations = new Dictionary<string, object>();
}
calculations.Clear();
calculations.Add("target_risk", targetRisk);
calculations.Add("stop_ticks", intent.StopTicks);
calculations.Add("tick_value", tickValue);
calculations.Add("risk_per_contract", riskPerContract);
calculations.Add("optimal_contracts", optimalContracts);
calculations.Add("clamped_contracts", contracts);
calculations.Add("actual_risk", actualRisk);
calculations.Add("min_contracts", config.MinContracts);
calculations.Add("max_contracts", config.MaxContracts);
var result = new SizingResult(
contracts: contracts,
riskAmount: actualRisk,
method: SizingMethod.FixedDollarRisk,
calculations: calculations
);
return result;
}
private static double CalculateOptimalFValue(List<TradeResult> tradeHistory)
{
if (tradeHistory == null || tradeHistory.Count == 0)
return 0.0;
// Find the largest loss (in absolute terms)
var largestLoss = Math.Abs(tradeHistory.Min(t => t.ProfitLoss));
if (largestLoss == 0)
return 0.0;
// Calculate Optimal f using the formula:
// f = (N*R - T) / (N*L)
// Where: N = number of trades, R = average win, L = largest loss, T = total profit
var n = tradeHistory.Count;
var totalProfit = tradeHistory.Sum(t => t.ProfitLoss);
var averageWin = tradeHistory.Where(t => t.ProfitLoss > 0).DefaultIfEmpty(new TradeResult()).Average(t => t.ProfitLoss);
if (averageWin <= 0)
return 0.0;
var optimalF = (n * averageWin - totalProfit) / (n * largestLoss);
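// Editor note, illustrative numbers only: 10 trades with averageWin = $400, totalProfit = $1,500
// and largestLoss = $500 give f = (10 * 400 - 1500) / (10 * 500) = 0.5 before clamping.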
// Ensure f is between 0 and 1
return Math.Max(0.0, Math.Min(1.0, optimalF));
}
private static double CalculateWinRate(List<TradeResult> tradeHistory)
{
if (tradeHistory == null || tradeHistory.Count == 0)
return 0.0;
var winningTrades = tradeHistory.Count(t => t.ProfitLoss > 0);
return (double)winningTrades / tradeHistory.Count;
}
private static double CalculateAverageWin(List<TradeResult> tradeHistory)
{
if (tradeHistory == null || tradeHistory.Count == 0)
return 0.0;
var winningTrades = tradeHistory.Where(t => t.ProfitLoss > 0).ToList();
if (winningTrades.Count == 0)
return 0.0;
return winningTrades.Average(t => t.ProfitLoss);
}
private static double CalculateAverageLoss(List<TradeResult> tradeHistory)
{
if (tradeHistory == null || tradeHistory.Count == 0)
return 0.0;
var losingTrades = tradeHistory.Where(t => t.ProfitLoss < 0).ToList();
if (losingTrades.Count == 0)
return 0.0;
return losingTrades.Average(t => t.ProfitLoss);
}
private static double GetMaximumLossFromHistory(List<TradeResult> tradeHistory)
{
if (tradeHistory == null || tradeHistory.Count == 0)
return 0.0;
return tradeHistory.Min(t => t.ProfitLoss);
}
private static double CalculateATR(StrategyContext context, string symbol, int periods)
{
// This would typically involve retrieving historical bar data
// For this implementation, we'll use a simplified approach
return 1.0; // Placeholder value
}
private static double CalculateVolatilityAdjustment(double atr, string symbol)
{
// Normalize ATR to a volatility adjustment factor
// Higher ATR = lower adjustment (reduce position size)
// Lower ATR = higher adjustment (increase position size)
// This is a simplified example - in practice, you'd normalize against
// historical ATR values for the specific symbol
var normalizedATR = atr / 10.0; // Example normalization
var adjustment = 1.0 / (1.0 + normalizedATR);
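// Editor note, illustrative numbers only: atr = 2.0 gives normalizedATR = 0.2 and
// adjustment = 1 / 1.2, about 0.83, so size shrinks as volatility rises.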
// Ensure adjustment is between 0.1 and 2.0
return Math.Max(0.1, Math.Min(2.0, adjustment));
}
private static List<TradeResult> GetRecentTradeHistory(StrategyContext context, SizingConfig config)
{
// In a real implementation, this would retrieve actual trade history
// For this example, we'll return an empty list to trigger fallback behavior
return new List<TradeResult>();
}
private static T GetParameterValue<T>(SizingConfig config, string key, T defaultValue)
{
if (config.MethodParameters.ContainsKey(key))
{
try
{
return (T)Convert.ChangeType(config.MethodParameters[key], typeof(T));
}
catch
{
// If conversion fails, return default
return defaultValue;
}
}
return defaultValue;
}
private static double GetTickValue(string symbol)
{
// Static tick values for Phase 0 - will be configurable in Phase 1
switch (symbol)
{
case "ES": return 12.50; // E-mini S&P 500
case "MES": return 1.25; // Micro E-mini S&P 500
case "NQ": return 5.00; // E-mini NASDAQ-100
case "MNQ": return 0.50; // Micro E-mini NASDAQ-100
case "CL": return 10.00; // Crude Oil
case "GC": return 10.00; // Gold
case "6E": return 12.50; // Euro FX
case "6A": return 10.00; // Australian Dollar
default: return 12.50; // Default to ES value
}
}
public SizingMetadata GetMetadata()
{
var requiredParams = new List<string>();
requiredParams.Add("method");
requiredParams.Add("risk_per_trade");
requiredParams.Add("min_contracts");
requiredParams.Add("max_contracts");
return new SizingMetadata(
name: "Advanced Position Sizer",
description: "Optimal f, Kelly Criterion, and volatility-adjusted sizing with contract clamping",
requiredParameters: requiredParams
);
}
/// <summary>
/// Get current performance metrics snapshot
/// </summary>
public SizingMetricsSnapshot GetMetricsSnapshot()
{
return _metrics.GetSnapshot();
}
/// <summary>
/// Validate sizing configuration parameters
/// </summary>
public static bool ValidateConfig(SizingConfig config, out List<string> errors)
{
errors = new List<string>();
if (config.MinContracts < 0)
errors.Add("MinContracts must be >= 0");
if (config.MaxContracts <= 0)
errors.Add("MaxContracts must be > 0");
if (config.MinContracts > config.MaxContracts)
errors.Add("MinContracts must be <= MaxContracts");
if (config.RiskPerTrade <= 0)
errors.Add("RiskPerTrade must be > 0");
// Method-specific validation
switch (config.Method)
{
case SizingMethod.OptimalF:
// No additional parameters required for Optimal f
break;
case SizingMethod.KellyCriterion:
// Validate Kelly fraction parameter if provided
if (config.MethodParameters.ContainsKey("kelly_fraction"))
{
var kellyFraction = GetParameterValue<double>(config, "kelly_fraction", 0.5);
if (kellyFraction <= 0 || kellyFraction > 1.0)
errors.Add("Kelly fraction must be between 0 and 1.0");
}
break;
case SizingMethod.VolatilityAdjusted:
// No additional parameters required for volatility-adjusted sizing
break;
default:
errors.Add(String.Format("Unsupported sizing method: {0}", config.Method));
break;
}
return errors.Count == 0;
}
/// <summary>
/// Internal class to represent trade results for calculations
/// </summary>
private class TradeResult
{
public double ProfitLoss { get; set; }
public DateTime TradeTime { get; set; }
public TradeResult()
{
ProfitLoss = 0.0;
TradeTime = DateTime.UtcNow;
}
public TradeResult(double profitLoss, DateTime tradeTime)
{
ProfitLoss = profitLoss;
TradeTime = tradeTime;
}
}
}
/// <summary>
/// Performance metrics for sizing operations
/// </summary>
public class SizingMetrics
{
// Operation counters
public long TotalOperations { get; private set; }
public long OptimalFOperations { get; private set; }
public long KellyCriterionOperations { get; private set; }
public long VolatilityAdjustedOperations { get; private set; }
public long FallbackOperations { get; private set; }
// Timing metrics
public long TotalProcessingTimeMs { get; private set; }
public long MaxProcessingTimeMs { get; private set; }
public long MinProcessingTimeMs { get; private set; }
// Thread-safe counters
private readonly object _lock = new object();
public SizingMetrics()
{
MinProcessingTimeMs = long.MaxValue;
}
public void RecordOperation(SizingMethod method, long processingTimeMs)
{
lock (_lock)
{
TotalOperations++;
TotalProcessingTimeMs += processingTimeMs;
// Update min/max timing
if (processingTimeMs > MaxProcessingTimeMs)
MaxProcessingTimeMs = processingTimeMs;
if (processingTimeMs < MinProcessingTimeMs)
MinProcessingTimeMs = processingTimeMs;
// Update method-specific counters
switch (method)
{
case SizingMethod.OptimalF:
OptimalFOperations++;
break;
case SizingMethod.KellyCriterion:
KellyCriterionOperations++;
break;
case SizingMethod.VolatilityAdjusted:
VolatilityAdjustedOperations++;
break;
case SizingMethod.FixedDollarRisk:
FallbackOperations++;
break;
}
}
}
public SizingMetricsSnapshot GetSnapshot()
{
lock (_lock)
{
return new SizingMetricsSnapshot
{
TotalOperations = TotalOperations,
OptimalFOperations = OptimalFOperations,
KellyCriterionOperations = KellyCriterionOperations,
VolatilityAdjustedOperations = VolatilityAdjustedOperations,
FallbackOperations = FallbackOperations,
TotalProcessingTimeMs = TotalProcessingTimeMs,
MaxProcessingTimeMs = MaxProcessingTimeMs,
MinProcessingTimeMs = MinProcessingTimeMs,
AverageProcessingTimeMs = TotalOperations > 0 ? (double)TotalProcessingTimeMs / TotalOperations : 0
};
}
}
}
/// <summary>
/// Snapshot of sizing metrics
/// </summary>
public class SizingMetricsSnapshot
{
public long TotalOperations { get; set; }
public long OptimalFOperations { get; set; }
public long KellyCriterionOperations { get; set; }
public long VolatilityAdjustedOperations { get; set; }
public long FallbackOperations { get; set; }
public long TotalProcessingTimeMs { get; set; }
public long MaxProcessingTimeMs { get; set; }
public long MinProcessingTimeMs { get; set; }
public double AverageProcessingTimeMs { get; set; }
}
}
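
To tie the pieces together, this editor-added sketch (not part of the diff) builds the sizer and calls CalculateSize with a Kelly configuration. The StrategyIntent, StrategyContext, AccountInfo, Position, MarketSession and SizingConfig constructor shapes are taken from the test helpers in the next file; with GetRecentTradeHistory currently returning an empty list, the call falls back to fixed-dollar-risk sizing.

// Usage sketch; TestLogger (defined in the test file below) stands in for any ILogger.
ILogger logger = new TestLogger();
var sizer = new AdvancedPositionSizer(logger);
var intent = new StrategyIntent(
    symbol: "ES", side: OrderSide.Buy, entryType: OrderType.Market, limitPrice: null,
    stopTicks: 8, targetTicks: 16, confidence: 0.8, reason: "Breakout entry",
    metadata: new Dictionary<string, object>());
var context = new StrategyContext(
    symbol: "ES", currentTime: DateTime.UtcNow,
    currentPosition: new Position("ES", 0, 0, 0, 0, DateTime.UtcNow),
    account: new AccountInfo(50000, 50000, 0, 0, DateTime.UtcNow),
    session: new MarketSession(DateTime.Today.AddHours(9.5), DateTime.Today.AddHours(16), true, "RTH"),
    customData: new Dictionary<string, object>());
var config = new SizingConfig(
    method: SizingMethod.KellyCriterion, minContracts: 1, maxContracts: 10,
    riskPerTrade: 500, methodParameters: new Dictionary<string, object> { { "kelly_fraction", 0.25 } });
List<string> errors;
if (AdvancedPositionSizer.ValidateConfig(config, out errors))
{
    var sizing = sizer.CalculateSize(intent, context, config);
    Console.WriteLine("Sized to " + sizing.Contracts + " contracts");
}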

View File

@@ -0,0 +1,299 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NT8.Core.Common.Models;
using NT8.Core.Logging;
using NT8.Core.Sizing;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;
namespace NT8.Core.Tests.Sizing
{
[TestClass]
public class AdvancedPositionSizerPerformanceTests
{
private TestLogger _logger;
private AdvancedPositionSizer _positionSizer;
[TestInitialize]
public void TestInitialize()
{
_logger = new TestLogger();
_positionSizer = new AdvancedPositionSizer(_logger);
}
[TestMethod]
public void AdvancedPositionSizer_Performance_MetricsRecording()
{
// Arrange
var intent = CreateValidIntent();
var context = CreateTestContext();
var config = CreateTestSizingConfig(SizingMethod.OptimalF);
// Act
var stopwatch = Stopwatch.StartNew();
var result = _positionSizer.CalculateSize(intent, context, config);
stopwatch.Stop();
// Assert
Assert.IsNotNull(result);
Assert.IsTrue(result.Contracts >= 0);
// Check that metrics were recorded
var metricsSnapshot = _positionSizer.GetMetricsSnapshot();
Assert.IsNotNull(metricsSnapshot);
Assert.IsTrue(metricsSnapshot.TotalOperations >= 1);
Assert.IsTrue(metricsSnapshot.OptimalFOperations >= 1);
Assert.IsTrue(metricsSnapshot.TotalProcessingTimeMs >= 0);
Assert.IsTrue(metricsSnapshot.AverageProcessingTimeMs >= 0);
}
[TestMethod]
public void AdvancedPositionSizer_Performance_ObjectPooling()
{
// Arrange
var intent = CreateValidIntent();
var context = CreateTestContext();
var config = CreateTestSizingConfig(SizingMethod.KellyCriterion);
// Act & Assert
// Run multiple calculations to test object pooling
for (int i = 0; i < 100; i++)
{
var result = _positionSizer.CalculateSize(intent, context, config);
Assert.IsNotNull(result);
Assert.IsTrue(result.Contracts >= 0);
}
// Check that we still have reasonable performance
var metricsSnapshot = _positionSizer.GetMetricsSnapshot();
Assert.IsTrue(metricsSnapshot.TotalOperations >= 100);
Assert.IsTrue(metricsSnapshot.AverageProcessingTimeMs < 100); // Should be fast with pooling
}
[TestMethod]
public async Task AdvancedPositionSizer_Performance_ConcurrentAccess()
{
// Arrange
var tasks = new List<Task<SizingResult>>();
var intent = CreateValidIntent();
var context = CreateTestContext();
var config = CreateTestSizingConfig(SizingMethod.VolatilityAdjusted);
// Act
var stopwatch = Stopwatch.StartNew();
for (int i = 0; i < 50; i++)
{
var task = Task.Run(() => _positionSizer.CalculateSize(intent, context, config));
tasks.Add(task);
}
var results = await Task.WhenAll(tasks);
stopwatch.Stop();
// Assert
Assert.AreEqual(50, results.Length);
foreach (var result in results)
{
Assert.IsNotNull(result);
Assert.IsTrue(result.Contracts >= 0);
}
// Check metrics
var metricsSnapshot = _positionSizer.GetMetricsSnapshot();
Assert.IsTrue(metricsSnapshot.TotalOperations >= 50);
Assert.IsTrue(metricsSnapshot.VolatilityAdjustedOperations >= 50);
}
[TestMethod]
public void AdvancedPositionSizer_Performance_Throughput()
{
// Arrange
var intent = CreateValidIntent();
var context = CreateTestContext();
var config = CreateTestSizingConfig(SizingMethod.OptimalF);
// Act
var stopwatch = Stopwatch.StartNew();
const int iterations = 1000;
for (int i = 0; i < iterations; i++)
{
var result = _positionSizer.CalculateSize(intent, context, config);
Assert.IsNotNull(result);
}
stopwatch.Stop();
// Assert
var metricsSnapshot = _positionSizer.GetMetricsSnapshot();
Assert.IsTrue(metricsSnapshot.TotalOperations >= iterations);
// Calculate throughput
var throughput = (double)iterations / stopwatch.Elapsed.TotalSeconds;
Assert.IsTrue(throughput > 100); // Should process at least 100 operations per second
_logger.LogInformation(String.Format("Processed {0} operations in {1:F2} ms ({2:F2} ops/sec)",
iterations, stopwatch.Elapsed.TotalMilliseconds, throughput));
}
[TestMethod]
public void AdvancedPositionSizer_Performance_MemoryAllocation()
{
// Arrange
var intent = CreateValidIntent();
var context = CreateTestContext();
var config = CreateTestSizingConfig(SizingMethod.KellyCriterion);
// Force garbage collection to get a clean baseline
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
var initialMemory = GC.GetTotalMemory(false);
// Act
for (int i = 0; i < 1000; i++)
{
var result = _positionSizer.CalculateSize(intent, context, config);
Assert.IsNotNull(result);
}
// Force garbage collection
GC.Collect();
GC.WaitForPendingFinalizers();
var finalMemory = GC.GetTotalMemory(false);
// Assert - with object pooling, memory growth should be minimal
var memoryGrowth = finalMemory - initialMemory;
Assert.IsTrue(memoryGrowth < 10 * 1024 * 1024); // Less than 10MB growth for 1000 operations
_logger.LogInformation(String.Format("Memory growth: {0:F2} KB for 1000 operations",
memoryGrowth / 1024.0));
}
[TestMethod]
public void AdvancedPositionSizer_Performance_MetricsConsistency()
{
// Arrange
var intent = CreateValidIntent();
var context = CreateTestContext();
var config1 = CreateTestSizingConfig(SizingMethod.OptimalF);
var config2 = CreateTestSizingConfig(SizingMethod.KellyCriterion);
var config3 = CreateTestSizingConfig(SizingMethod.VolatilityAdjusted);
// Act
// Run mixed operations
for (int i = 0; i < 30; i++)
{
_positionSizer.CalculateSize(intent, context, config1);
_positionSizer.CalculateSize(intent, context, config2);
_positionSizer.CalculateSize(intent, context, config3);
}
// Assert
var metricsSnapshot = _positionSizer.GetMetricsSnapshot();
Assert.IsTrue(metricsSnapshot.TotalOperations >= 90);
Assert.IsTrue(metricsSnapshot.OptimalFOperations >= 30);
Assert.IsTrue(metricsSnapshot.KellyCriterionOperations >= 30);
Assert.IsTrue(metricsSnapshot.VolatilityAdjustedOperations >= 30);
// Verify timing metrics are reasonable
Assert.IsTrue(metricsSnapshot.MaxProcessingTimeMs >= metricsSnapshot.MinProcessingTimeMs);
Assert.IsTrue(metricsSnapshot.AverageProcessingTimeMs >= 0);
}
#region Helper Methods
private StrategyIntent CreateValidIntent(
string symbol = "ES",
int stopTicks = 8,
OrderSide side = OrderSide.Buy)
{
return new StrategyIntent(
symbol: symbol,
side: side,
entryType: OrderType.Market,
limitPrice: null,
stopTicks: stopTicks,
targetTicks: 16,
confidence: 0.8,
reason: "Test intent",
metadata: new Dictionary<string, object>()
);
}
private StrategyContext CreateTestContext(string symbol = "ES")
{
return new StrategyContext(
symbol: symbol,
currentTime: DateTime.UtcNow,
currentPosition: new Position(symbol, 0, 0, 0, 0, DateTime.UtcNow),
account: new AccountInfo(50000, 50000, 0, 0, DateTime.UtcNow),
session: new MarketSession(DateTime.Today.AddHours(9.5), DateTime.Today.AddHours(16), true, "RTH"),
customData: new Dictionary<string, object>()
);
}
private SizingConfig CreateTestSizingConfig(SizingMethod method)
{
var methodParameters = new Dictionary<string, object>();
if (method == SizingMethod.KellyCriterion)
{
methodParameters.Add("kelly_fraction", 0.5);
}
return new SizingConfig(
method: method,
minContracts: 1,
maxContracts: 10,
riskPerTrade: 500,
methodParameters: methodParameters
);
}
#endregion
}
/// <summary>
/// Test implementation of ILogger for testing
/// </summary>
public class TestLogger : ILogger
{
public void LogCritical(string message, params object[] args)
{
// No-op for testing
}
public void LogDebug(string message, params object[] args)
{
// No-op for testing
}
public void LogError(string message, params object[] args)
{
// No-op for testing
}
public void LogError(Exception exception, string message, params object[] args)
{
// No-op for testing
}
public void LogInformation(string message, params object[] args)
{
// Echo to the console so performance figures show up in test output
Console.WriteLine("[INFO] " + message, args);
}
public void LogWarning(string message, params object[] args)
{
// No-op for testing
}
public bool IsEnabled(int logLevel)
{
return true;
}
}
}