using System;
using Unity.Jobs.LowLevel.Unsafe;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Collections;
using System.Diagnostics;
using Unity.Burst;
namespace Unity.Jobs
{
/// <summary>
/// Job type allowing for data to be operated on in parallel batches.
/// </summary>
/// <remarks>
/// When scheduling an IJobParallelForBatch job the number of elements to work on is specified along with a batch size. Jobs will then run in parallel
/// invoking Execute at a particular 'startIndex' of your working set and for a specified 'count' number of elements.
/// </remarks>
[JobProducerType(typeof(IJobParallelForBatchExtensions.JobParallelForBatchProducer<>))]
public interface IJobParallelForBatch
{
    /// <summary>
    /// Function operation on a "batch" of data contained within the job.
    /// </summary>
    /// <param name="startIndex">Starting index of job data to safely access.</param>
    /// <param name="count">Number of elements to operate on in the batch.</param>
    void Execute(int startIndex, int count);
}
/// <summary>
/// Extension class for the IJobParallelForBatch job type providing custom overloads for scheduling and running.
/// </summary>
public static class IJobParallelForBatchExtensions
{
    // Job-system glue for a concrete IJobParallelForBatch implementation T.
    // Caches the managed reflection data in Burst-shared storage and exposes the
    // Execute trampoline invoked by the native job system on worker threads.
    internal struct JobParallelForBatchProducer<T> where T : struct, IJobParallelForBatch
    {
        // SharedStatic so Burst-compiled code can read the reflection pointer
        // that was produced by the (non-Burst) Initialize call.
        internal static readonly SharedStatic<IntPtr> jobReflectionData = SharedStatic<IntPtr>.GetOrCreate<JobParallelForBatchProducer<T>>();

        // Reflection is not Burst-compatible, so this must run in managed code only.
        [BurstDiscard]
        internal static void Initialize()
        {
            if (jobReflectionData.Data == IntPtr.Zero)
                jobReflectionData.Data = JobsUtility.CreateJobReflectionData(typeof(T), (ExecuteJobFunction)Execute);
        }

        internal delegate void ExecuteJobFunction(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex);

        // Entry point called by the job system; pulls batches via work stealing
        // until no ranges remain, forwarding each [begin, end) slice to the job.
        public unsafe static void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
        {
            while (true)
            {
                if (!JobsUtility.GetWorkStealingRange(
                    ref ranges,
                    jobIndex, out int begin, out int end))
                    return;

#if ENABLE_UNITY_COLLECTIONS_CHECKS
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif

                jobData.Execute(begin, end - begin);
            }
        }
    }

    /// <summary>
    /// Gathers and caches reflection data for the internal job system's managed bindings. Unity is responsible for calling this method - don't call it yourself.
    /// </summary>
    /// <typeparam name="T">Job type</typeparam>
    /// <remarks>
    /// When the Jobs package is included in the project, Unity generates code to call EarlyJobInit at startup. This allows Burst compiled code to schedule jobs because the reflection part of initialization, which is not compatible with burst compiler constraints, has already happened in EarlyJobInit.
    ///
    /// __Note__: While the Jobs package code generator handles this automatically for all closed job types, you must register those with generic arguments (like IJobParallelForBatch&lt;MyJobType&lt;T&gt;&gt;) manually for each specialization with [[Unity.Jobs.RegisterGenericJobTypeAttribute]].
    /// </remarks>
    public static void EarlyJobInit<T>()
        where T : struct, IJobParallelForBatch
    {
        JobParallelForBatchProducer<T>.Initialize();
    }

    // Returns the cached reflection data for T, initializing it on first use and
    // validating it so scheduling from Burst fails with a clear error if missing.
    static IntPtr GetReflectionData<T>()
        where T : struct, IJobParallelForBatch
    {
        JobParallelForBatchProducer<T>.Initialize();
        var reflectionData = JobParallelForBatchProducer<T>.jobReflectionData.Data;
        CollectionHelper.CheckReflectionDataCorrect<T>(reflectionData);
        return reflectionData;
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle Schedule<T>(this T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), dependsOn, ScheduleMode.Single);
        return JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, indicesPerJobCount);
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule. In this variant, the jobData is
    /// passed by reference, which may be necessary for unusually large job structs.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle ScheduleByRef<T>(this ref T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), dependsOn, ScheduleMode.Single);
        return JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, indicesPerJobCount);
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle ScheduleParallel<T>(this T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), dependsOn, ScheduleMode.Parallel);
        return JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, indicesPerJobCount);
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule. In this variant, the jobData is
    /// passed by reference, which may be necessary for unusually large job structs.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle ScheduleParallelByRef<T>(this ref T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), dependsOn, ScheduleMode.Parallel);
        return JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, indicesPerJobCount);
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle ScheduleBatch<T>(this T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        return ScheduleParallel(jobData, arrayLength, indicesPerJobCount, dependsOn);
    }

    /// <summary>
    /// Schedules a job that will execute the parallel batch job for all `arrayLength` elements in batches of `indicesPerJobCount`.
    /// The Execute() method for Job T will be provided the start index and number of elements to safely operate on.
    /// In cases where `indicesPerJobCount` is not a multiple of `arrayLength`, the `count` provided to the Execute method of Job T will be smaller than the `indicesPerJobCount` specified here.
    /// </summary>
    /// <param name="jobData">The job and data to schedule. In this variant, the jobData is
    /// passed by reference, which may be necessary for unusually large job structs.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch.</param>
    /// <param name="dependsOn">Dependencies are used to ensure that a job executes on workerthreads after the dependency has completed execution. Making sure that two jobs reading or writing to same data do not run in parallel.</param>
    /// <returns>JobHandle The handle identifying the scheduled job. Can be used as a dependency for a later job or ensure completion on the main thread.</returns>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe JobHandle ScheduleBatchByRef<T>(this ref T jobData, int arrayLength, int indicesPerJobCount,
        JobHandle dependsOn = new JobHandle()) where T : struct, IJobParallelForBatch
    {
        return ScheduleParallelByRef(ref jobData, arrayLength, indicesPerJobCount, dependsOn);
    }

    /// <summary>
    /// Executes the parallel batch job but on the main thread. See IJobParallelForBatchExtensions.Schedule for more information on how appending is performed.
    /// </summary>
    /// <param name="jobData">The job and data to schedule.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch. This argument is ignored when using .Run()</param>
    /// <typeparam name="T">Job type</typeparam>
    /// <remarks>
    /// Unlike Schedule, since the job is running on the main thread no parallelization occurs and thus no `indicesPerJobCount` batch size is required to be specified.
    /// </remarks>
    public static unsafe void Run<T>(this T jobData, int arrayLength, int indicesPerJobCount) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), new JobHandle(), ScheduleMode.Run);
        // Batch size is deliberately arrayLength: the whole range runs as one batch on the main thread.
        JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, arrayLength);
    }

    /// <summary>
    /// Executes the parallel batch job but on the main thread. See IJobParallelForBatchExtensions.Schedule for more information on how appending is performed.
    /// </summary>
    /// <param name="jobData">The job and data to schedule. In this variant, the jobData is
    /// passed by reference, which may be necessary for unusually large job structs.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <param name="indicesPerJobCount">Number of elements to consider in a single parallel batch. This argument is ignored when using .RunByRef()</param>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe void RunByRef<T>(this ref T jobData, int arrayLength, int indicesPerJobCount) where T : struct, IJobParallelForBatch
    {
        var scheduleParams = new JobsUtility.JobScheduleParameters(UnsafeUtility.AddressOf(ref jobData), GetReflectionData<T>(), new JobHandle(), ScheduleMode.Run);
        // Batch size is deliberately arrayLength: the whole range runs as one batch on the main thread.
        JobsUtility.ScheduleParallelFor(ref scheduleParams, arrayLength, arrayLength);
    }

    /// <summary>
    /// Executes the parallel batch job but on the main thread. See IJobParallelForBatchExtensions.ScheduleBatch for more information on how appending is performed.
    /// </summary>
    /// <param name="jobData">The job and data to schedule.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <typeparam name="T">Job type</typeparam>
    /// <remarks>
    /// Unlike ScheduleBatch, since the job is running on the main thread no parallelization occurs and thus no `indicesPerJobCount` batch size is required to be specified.
    /// </remarks>
    public static unsafe void RunBatch<T>(this T jobData, int arrayLength) where T : struct, IJobParallelForBatch
    {
        Run(jobData, arrayLength, arrayLength);
    }

    /// <summary>
    /// Executes the parallel batch job but on the main thread. See IJobParallelForBatchExtensions.ScheduleBatch for more information on how appending is performed.
    /// </summary>
    /// <param name="jobData">The job and data to schedule. In this variant, the jobData is
    /// passed by reference, which may be necessary for unusually large job structs.</param>
    /// <param name="arrayLength">Total number of elements to consider when batching.</param>
    /// <typeparam name="T">Job type</typeparam>
    public static unsafe void RunBatchByRef<T>(this ref T jobData, int arrayLength) where T : struct, IJobParallelForBatch
    {
        RunByRef(ref jobData, arrayLength, arrayLength);
    }
}
}