Hi all,
I’m currently running a custom NinjaTrader 8 strategy that performs forward testing and logs multiple indicator values across several timeframes (including non-standard series such as 8 Range and 512 Tick). Logging is done at the close of each 1-minute bar, and the data is exported to a .csv file.
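For context, the extra series are added in State.Configure roughly like this (a simplified sketch; only the two non-standard series mentioned above are shown, and the remaining timeframes are added the same way):

protected override void OnStateChange()
{
    if (State == State.Configure)
    {
        // Primary series is the 1-minute chart series; additional series are added here
        AddDataSeries(BarsPeriodType.Range, 8);   // 8 Range
        AddDataSeries(BarsPeriodType.Tick, 512);  // 512 Tick
        // ... further timeframes are added the same way ...
    }
}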
However, when I run the strategy at high speed (e.g., x100), I’m encountering issues with the logged data.
Questions:
- Is the high speed (x100) during forward testing interfering with data series synchronization, or causing indicators not to be fully ready when BarsInProgress == 0?
- Is there a recommended way to ensure that indicator values are fully initialized and correctly synchronized before logging at bar close during high-speed forward testing? (A minimal sketch of the kind of readiness check I mean follows the questions.)
- Are there best practices to ensure stability and data integrity for logging under high-speed simulation?
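To make the second question concrete, this is the kind of readiness guard I have in mind (a minimal sketch; requiredLookback is just an illustrative placeholder for the longest indicator lookback):

// Minimal sketch of a readiness guard checked before logging (requiredLookback is illustrative)
private bool AllSeriesReady(int requiredLookback)
{
    for (int i = 0; i < NUM_TIMEFRAMES; i++)
    {
        // Skip logging until every series has enough bars for the slowest indicator
        if (CurrentBars[i] < requiredLookback)
            return false;
    }
    return true;
}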
Any tips or suggestions would be highly appreciated! I’m trying to maintain reliable data for AI training, so precision and completeness are key.
Thanks in advance!
Below are the code snippets I use for logging, shown in the order they run, without the rest of the script. If the full script is needed, I can post it as well.
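The snippets assume the default NinjaScript using directives plus the following (needed for Path/Directory/File/StreamWriter, StringBuilder, and Parallel.For):

using System;
using System.IO;              // Path, Directory, File, StreamWriter
using System.Text;            // StringBuilder
using System.Threading.Tasks; // Parallel.For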
#region Logging Properties
[NinjaScriptProperty]
[Display(Name = "Enable Logging", Order = 15, GroupName = "General")]
public bool EnableLogging { get; set; } = true;
#endregion
#region Logging Private Fields
private string filePath;
private bool headerWritten = false;
private StringBuilder csvBuilder;
#endregion
#region Logging Setup Method
private void SetupLogging()
{
    if (EnableLogging)
    {
        try
        {
            string filename = $"MultiTimeframeIndicators_{Instrument.FullName}_{DateTime.Now:yyyyMMdd_HHmmss}.csv";
            filePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments),
                "NinjaTrader 8", "log", filename);
            Directory.CreateDirectory(Path.GetDirectoryName(filePath));
            Print($"Logging to: {filePath}");
        }
        catch (Exception ex)
        {
            Print($"Error setting up logging: {ex.Message}");
            EnableLogging = false;
        }
    }
}
#endregion
#region CSV Header Method
private void WriteHeader()
{
    try
    {
        csvBuilder.Clear();
        csvBuilder.Append("Timestamp,CET_Time,Trading_Hours_Status");

        for (int i = 0; i < timeframeNames.Length; i++)
        {
            string tf = timeframeNames[i];

            // OHLCV
            csvBuilder.Append($",{tf}_Open,{tf}_High,{tf}_Low,{tf}_Close,{tf}_Volume");

            // Core Technical Indicators
            csvBuilder.Append($",{tf}_EMA_Fast,{tf}_EMA_Slow,{tf}_RSI,{tf}_ATR");
            csvBuilder.Append($",{tf}_MACD,{tf}_MACD_Signal,{tf}_MACD_Hist");
            csvBuilder.Append($",{tf}_BB_Upper,{tf}_BB_Middle,{tf}_BB_Lower,{tf}_BB_Width,{tf}_BB_Position");
            csvBuilder.Append($",{tf}_Stoch_K,{tf}_Stoch_D,{tf}_ADX,{tf}_DI_Plus,{tf}_DI_Minus");
            csvBuilder.Append($",{tf}_OBV,{tf}_MFI,{tf}_VPT");
            csvBuilder.Append($",{tf}_Donchian_High,{tf}_Donchian_Low,{tf}_ZigZag");
            csvBuilder.Append($",{tf}_RelVol,{tf}_Choppiness,{tf}_VWAP,{tf}_Trend_Strength");

            // Smart Money Concepts (if enabled)
            if (EnableSmartMoney)
            {
                csvBuilder.Append($",{tf}_FVG_Bullish,{tf}_FVG_Bearish");
                csvBuilder.Append($",{tf}_Swing_High,{tf}_Swing_Low");
                csvBuilder.Append($",{tf}_BoS_Up,{tf}_BoS_Down");
                csvBuilder.Append($",{tf}_CHoCH,{tf}_Trend");
            }
            else
            {
                csvBuilder.Append($",{tf}_SM_NA1,{tf}_SM_NA2,{tf}_SM_NA3,{tf}_SM_NA4");
                csvBuilder.Append($",{tf}_SM_NA5,{tf}_SM_NA6,{tf}_SM_NA7,{tf}_SM_NA8");
            }
        }

        WriteLine(csvBuilder.ToString());
    }
    catch (Exception ex)
    {
        Print($"Error writing header: {ex.Message}");
    }
}
#endregion
#region Data Processing for Logging
private void ProcessAllTimeframesData()
{
    try
    {
        DateTime logTime = Times[0][0];
        DateTime cetTime = ConvertToCET(logTime);
        string tradingHoursStatus = GetTradingHoursStatus(logTime);

        csvBuilder.Clear();
        csvBuilder.Append($"{logTime:yyyy-MM-dd HH:mm:ss},{cetTime:yyyy-MM-dd HH:mm:ss},{tradingHoursStatus}");

        // Process Smart Money in parallel (if enabled)
        if (EnableSmartMoney)
        {
            Parallel.For(0, NUM_TIMEFRAMES, new ParallelOptions { MaxDegreeOfParallelism = 2 }, i =>
            {
                if (CurrentBars[i] >= 2 * MarketStructurePeriod + 1)
                {
                    AnalyzeSmartMoney(i);
                }
            });
        }

        // Append data for each timeframe
        for (int i = 0; i < NUM_TIMEFRAMES; i++)
        {
            if (CurrentBars[i] < 1)
            {
                AppendInsufficientDataValues();
                continue;
            }
            AppendTimeframeData(i);
        }

        WriteLine(csvBuilder.ToString());
    }
    catch (Exception ex)
    {
        Print($"Error processing timeframes data: {ex.Message}");
    }
}
#endregion
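ConvertToCET and GetTradingHoursStatus are helpers from the full script that I left out above; for completeness, they do roughly the following (simplified sketches; the time-zone ID and the session window below are placeholders, not the exact values from my script):

// Simplified sketch of the time-zone helper (placeholder, not the exact script code)
private DateTime ConvertToCET(DateTime time)
{
    // "Central European Standard Time" is the Windows time-zone ID covering CET/CEST
    TimeZoneInfo cet = TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time");
    return TimeZoneInfo.ConvertTime(time, cet);
}

// Simplified sketch of the trading-hours helper (placeholder session window)
private string GetTradingHoursStatus(DateTime time)
{
    DateTime cet = ConvertToCET(time);
    return (cet.TimeOfDay >= new TimeSpan(8, 0, 0) && cet.TimeOfDay < new TimeSpan(22, 0, 0))
        ? "RTH" : "ETH";
}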
#region File Write Operations
private void WriteLine(string text)
{
    if (!EnableLogging) return;

    try
    {
        using (StreamWriter sw = File.AppendText(filePath))
        {
            sw.WriteLine(text);
            sw.Flush();
        }
    }
    catch (IOException ioEx)
    {
        Print($"IO Error: {ioEx.Message}");
        try
        {
            string backupPath = filePath.Replace(".csv", $"_backup_{DateTime.Now:HHmmss}.csv");
            using (StreamWriter sw = File.AppendText(backupPath))
            {
                sw.WriteLine(text);
            }
            Print($"Backup created: {backupPath}");
        }
        catch
        {
            Print("Failed to create backup file");
        }
    }
    catch (Exception ex)
    {
        Print($"Error writing file: {ex.Message}");
    }
}
#endregion
#region Logging-related calls in OnBarUpdate()
// Write header on first run
if (!headerWritten && EnableLogging)
{
    WriteHeader();
    headerWritten = true;
}

// Process and log data
if (EnableLogging)
{
    ProcessAllTimeframesData();
}
#endregion
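For context, these calls sit inside OnBarUpdate behind the 1-minute bar-close gate (simplified; this assumes the strategy calculates on bar close, so OnBarUpdate fires once per closed bar):

// Simplified: surrounding OnBarUpdate structure showing how the calls above are gated
protected override void OnBarUpdate()
{
    // Only log on the primary (1-minute) series so each row corresponds to one 1-minute close
    if (BarsInProgress != 0)
        return;

    // ... header check and ProcessAllTimeframesData() call from the region above go here ...
}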
#region Logging initialization in DataLoaded state
csvBuilder = new StringBuilder(6144);
SetupLogging();
#endregion
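And the initialization above runs in OnStateChange (simplified):

// Simplified: where the logging initialization is called
protected override void OnStateChange()
{
    if (State == State.DataLoaded)
    {
        csvBuilder = new StringBuilder(6144);
        SetupLogging();
    }
}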