// This feature demonstrates parallel processing, task coordination, and
// cancellation tokens. Complete Implementation.
// (Fixed: this header was bare prose, not a comment, which is invalid C#.)
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace ParallelDataPipeline
{
    // Custom data entity
    /// <summary>
    /// A unit of work flowing through the pipeline: raw input, processed
    /// output, timing fields, and a validity flag set by the validation stage.
    /// </summary>
    public class WorkItem
    {
        /// <summary>Identifier of the item, used in progress/log messages.</summary>
        public int Id { get; set; }

        /// <summary>Raw input payload; checked by the validation stage.</summary>
        public string InputData { get; set; }

        /// <summary>Processed result; set to "INVALID" when validation fails.</summary>
        public string ProcessedData { get; set; }

        /// <summary>Start timestamp. NOTE(review): never assigned in the visible code — confirm intended use.</summary>
        public DateTime StartTime { get; set; }

        /// <summary>End timestamp. NOTE(review): never assigned in the visible code — confirm intended use.</summary>
        public DateTime EndTime { get; set; }

        /// <summary>True when InputData passed validation (non-blank, length >= 3).</summary>
        public bool IsValid { get; set; }
    }
/// <summary>
/// Builds the fixed three-stage pipeline (validation, transformation,
/// enrichment) and records the requested degree of parallelism.
/// (Fixed: removed stray non-code residue "net fx 4.0" after the closing brace.)
/// </summary>
/// <param name="parallelismLevel">
/// Requested degree of parallelism; defaults to 3.
/// NOTE(review): stored in _parallelismLevel but not read anywhere in the
/// visible code — confirm intended use.
/// </param>
public ParallelPipelineProcessor(int parallelismLevel = 3)
{
    _stages = new List<IPipelineStage<WorkItem, WorkItem>>
    {
        new ValidationStage(),
        new TransformationStage(),
        new EnrichmentStage()
    };
    _parallelismLevel = parallelismLevel;
}
/// <summary>
/// Validates a work item: InputData must be non-blank and at least three
/// characters long. Invalid items get ProcessedData = "INVALID" and are still
/// returned so the pipeline can carry them forward.
/// (Fixed: removed trailing paste residue "inputQueue : GetQueueForStage(stageIndex - 1)",
/// a duplicate fragment of the pipeline loop that is invalid here.)
/// </summary>
/// <param name="input">Item to validate; mutated in place and returned.</param>
/// <param name="token">Cancels the simulated validation delay.</param>
/// <returns>The same item with IsValid (and possibly ProcessedData) set.</returns>
public async Task<WorkItem> ProcessAsync(WorkItem input, CancellationToken token)
{
    // Simulate validation work
    await Task.Delay(50, token);

    input.IsValid = !string.IsNullOrWhiteSpace(input.InputData) && input.InputData.Length >= 3;
    if (!input.IsValid)
    {
        input.ProcessedData = "INVALID";
    }

    Console.WriteLine($"[{StageName}] Item {input.Id}: Valid = {input.IsValid}");
    return input;
}
}
// Stage 1: Data Validation
/// <summary>
/// First pipeline stage: checks each WorkItem's InputData and sets IsValid.
/// NOTE(review): this file is scrambled — the class's ProcessAsync body and
/// closing brace appear on an EARLIER line; restore declaration order when
/// reassembling the file.
/// (Fixed: removed trailing paste residue "var processedItem = await stage.ProcessAsync(item",
/// an invalid statement fragment at class scope, duplicated from the pipeline loop.)
/// </summary>
public class ValidationStage : IPipelineStage<WorkItem, WorkItem>
{
    // Name reported in console output and progress messages.
    public string StageName => "Validation";
// NOTE(review): this span starts MID-METHOD — the enclosing async method's
// signature (which supplies inputQueue, cancellationToken, progress, results,
// and producerTask) is not visible in this chunk; confirm against the full file.

// Create consumer tasks for each pipeline stage
var stageTasks = new List<Task>();
for (int i = 0; i < _stages.Count; i++)
{
    // Copy loop state into locals so the Task.Run lambda below does not
    // close over the mutating loop variable.
    var stageIndex = i;
    var stage = _stages[stageIndex];
    // Every stage except the last hands its output to a fresh queue;
    // the last stage writes directly into results.
    var nextQueue = (stageIndex < _stages.Count - 1) ? new BlockingCollection<WorkItem>() : null;
    var stageTask = Task.Run(async () =>
    {
        // Stage 0 consumes the pipeline input; later stages consume the
        // queue published by the previous stage.
        var sourceQueue = (stageIndex == 0) ? inputQueue : GetQueueForStage(stageIndex - 1);
        // GetConsumingEnumerable blocks until the source queue is marked
        // complete, then the loop (and this stage) ends.
        foreach (var item in sourceQueue.GetConsumingEnumerable())
        {
            cancellationToken.ThrowIfCancellationRequested();
            progress?.Report($"Processing item {item.Id} in {stage.StageName}");
            var processedItem = await stage.ProcessAsync(item, cancellationToken);
            // Forward downstream, or collect final output at the last stage.
            if (nextQueue != null) { nextQueue.Add(processedItem, cancellationToken); } else { results.Add(processedItem); }
        }
        // Signal the downstream stage that no more items will arrive.
        nextQueue?.CompleteAdding();
    }, cancellationToken);
    stageTasks.Add(stageTask);
    // Publish this stage's output queue so stage (stageIndex + 1) can find it.
    // NOTE(review): the task above starts before the queue is stored, but each
    // task only reads the queue stored on the PREVIOUS loop iteration, so the
    // lookup is populated before it is needed — verify this ordering holds.
    StoreQueueForStage(stageIndex, nextQueue);
}
// Wait for every stage to drain, then for the producer feeding inputQueue.
await Task.WhenAll(stageTasks.ToArray());
await producerTask;
return results.ToList();
}

// Per-stage output queues, keyed by the producing stage's index.
private Dictionary<int, BlockingCollection<WorkItem>> _queues = new Dictionary<int, BlockingCollection<WorkItem>>();

// Remembers a stage's output queue; the final stage has none (null) and is skipped.
private void StoreQueueForStage(int stageIndex, BlockingCollection<WorkItem> queue) { if (queue != null) _queues[stageIndex] = queue; }

// Looks up the output queue of the given stage, or null if none was stored.
private BlockingCollection<WorkItem> GetQueueForStage(int stageIndex) { return _queues.ContainsKey(stageIndex) ? _queues[stageIndex] : null; }
}
// Parallel Pipeline Processor
/// <summary>
/// Runs WorkItems through a fixed chain of pipeline stages, each stage
/// consuming from the previous stage's queue on its own task.
/// NOTE(review): this class's members are scattered across other lines of
/// this scrambled file (constructor, processing loop, queue helpers).
/// </summary>
public class ParallelPipelineProcessor
{
    // Ordered stage chain; populated once in the constructor.
    private readonly List<IPipelineStage<WorkItem, WorkItem>> _stages;
    // Requested degree of parallelism. NOTE(review): assigned in the
    // constructor but never read in the visible code — confirm intended use.
    private readonly int _parallelismLevel;
// Stage 2: Data Transformation
/// <summary>
/// Second pipeline stage; identified as "Transformation" in progress messages.
/// NOTE(review): the class body (its ProcessAsync implementation and closing
/// brace) is not visible in this chunk — only the header and StageName.
/// </summary>
public class TransformationStage : IPipelineStage<WorkItem, WorkItem>
{
    // Name reported in console output and progress messages.
    public string StageName => "Transformation";