// src/Ryujinx.Graphics.Gpu/Engine/GPFifo/GPFifoDevice.cs
  1  using Ryujinx.Graphics.Gpu.Memory;
  2  using System;
  3  using System.Collections.Concurrent;
  4  using System.Runtime.CompilerServices;
  5  using System.Runtime.InteropServices;
  6  using System.Threading;
  7  
  8  namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
  9  {
 10      /// <summary>
 11      /// Represents a GPU General Purpose FIFO device.
 12      /// </summary>
 13      public sealed class GPFifoDevice : IDisposable
 14      {
 15          /// <summary>
 16          /// Indicates if the command buffer has pre-fetch enabled.
 17          /// </summary>
 18          private enum CommandBufferType
 19          {
 20              Prefetch,
 21              NoPrefetch,
 22          }
 23  
 24          /// <summary>
 25          /// Command buffer data.
 26          /// </summary>
 27          private struct CommandBuffer
 28          {
 29              /// <summary>
 30              /// Processor used to process the command buffer. Contains channel state.
 31              /// </summary>
 32              public GPFifoProcessor Processor;
 33  
 34              /// <summary>
 35              /// The type of the command buffer.
 36              /// </summary>
 37              public CommandBufferType Type;
 38  
 39              /// <summary>
 40              /// Fetched data.
 41              /// </summary>
 42              public int[] Words;
 43  
 44              /// <summary>
 45              /// The GPFIFO entry address (used in <see cref="CommandBufferType.NoPrefetch"/> mode).
 46              /// </summary>
 47              public ulong EntryAddress;
 48  
 49              /// <summary>
 50              /// The count of entries inside this GPFIFO entry.
 51              /// </summary>
 52              public uint EntryCount;
 53  
 54              /// <summary>
 55              /// Get the entries for the command buffer from memory.
 56              /// </summary>
 57              /// <param name="memoryManager">The memory manager used to fetch the data</param>
 58              /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
 59              /// <returns>The fetched data</returns>
 60              private readonly ReadOnlySpan<int> GetWords(MemoryManager memoryManager, bool flush)
 61              {
 62                  return MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush));
 63              }
 64  
 65              /// <summary>
 66              /// Prefetch the command buffer.
 67              /// </summary>
 68              /// <param name="memoryManager">The memory manager used to fetch the data</param>
 69              public void Prefetch(MemoryManager memoryManager)
 70              {
 71                  Words = GetWords(memoryManager, true).ToArray();
 72              }
 73  
 74              /// <summary>
 75              /// Fetch the command buffer.
 76              /// </summary>
 77              /// <param name="memoryManager">The memory manager used to fetch the data</param>
 78              /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
 79              /// <returns>The command buffer words</returns>
 80              public readonly ReadOnlySpan<int> Fetch(MemoryManager memoryManager, bool flush)
 81              {
 82                  return Words ?? GetWords(memoryManager, flush);
 83              }
 84          }
 85  
 86          private readonly ConcurrentQueue<CommandBuffer> _commandBufferQueue;
 87  
 88          private GPFifoProcessor _prevChannelProcessor;
 89  
 90          private readonly bool _ibEnable;
 91          private readonly GpuContext _context;
 92          private readonly AutoResetEvent _event;
 93  
 94          private bool _interrupt;
 95          private int _flushSkips;
 96  
 97          /// <summary>
 98          /// Creates a new instance of the GPU General Purpose FIFO device.
 99          /// </summary>
100          /// <param name="context">GPU context that the GPFIFO belongs to</param>
101          internal GPFifoDevice(GpuContext context)
102          {
103              _commandBufferQueue = new ConcurrentQueue<CommandBuffer>();
104              _ibEnable = true;
105              _context = context;
106              _event = new AutoResetEvent(false);
107          }
108  
109          /// <summary>
110          /// Signal the FIFO that there are new entries to process.
111          /// </summary>
112          public void SignalNewEntries()
113          {
114              _event.Set();
115          }
116  
117          /// <summary>
118          /// Push a GPFIFO entry in the form of a prefetched command buffer.
119          /// It is intended to be used by nvservices to handle special cases.
120          /// </summary>
121          /// <param name="processor">Processor used to process <paramref name="commandBuffer"/></param>
122          /// <param name="commandBuffer">The command buffer containing the prefetched commands</param>
123          internal void PushHostCommandBuffer(GPFifoProcessor processor, int[] commandBuffer)
124          {
125              _commandBufferQueue.Enqueue(new CommandBuffer
126              {
127                  Processor = processor,
128                  Type = CommandBufferType.Prefetch,
129                  Words = commandBuffer,
130                  EntryAddress = ulong.MaxValue,
131                  EntryCount = (uint)commandBuffer.Length,
132              });
133          }
134  
135          /// <summary>
136          /// Create a CommandBuffer from a GPFIFO entry.
137          /// </summary>
138          /// <param name="processor">Processor used to process the command buffer pointed to by <paramref name="entry"/></param>
139          /// <param name="entry">The GPFIFO entry</param>
140          /// <returns>A new CommandBuffer based on the GPFIFO entry</returns>
141          private static CommandBuffer CreateCommandBuffer(GPFifoProcessor processor, GPEntry entry)
142          {
143              CommandBufferType type = CommandBufferType.Prefetch;
144  
145              if (entry.Entry1Sync == Entry1Sync.Wait)
146              {
147                  type = CommandBufferType.NoPrefetch;
148              }
149  
150              ulong startAddress = ((ulong)entry.Entry0Get << 2) | ((ulong)entry.Entry1GetHi << 32);
151  
152              return new CommandBuffer
153              {
154                  Processor = processor,
155                  Type = type,
156                  Words = null,
157                  EntryAddress = startAddress,
158                  EntryCount = (uint)entry.Entry1Length,
159              };
160          }
161  
162          /// <summary>
163          /// Pushes GPFIFO entries.
164          /// </summary>
165          /// <param name="processor">Processor used to process the command buffers pointed to by <paramref name="entries"/></param>
166          /// <param name="entries">GPFIFO entries</param>
167          internal void PushEntries(GPFifoProcessor processor, ReadOnlySpan<ulong> entries)
168          {
169              bool beforeBarrier = true;
170  
171              for (int index = 0; index < entries.Length; index++)
172              {
173                  ulong entry = entries[index];
174  
175                  CommandBuffer commandBuffer = CreateCommandBuffer(processor, Unsafe.As<ulong, GPEntry>(ref entry));
176  
177                  if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
178                  {
179                      commandBuffer.Prefetch(processor.MemoryManager);
180                  }
181  
182                  if (commandBuffer.Type == CommandBufferType.NoPrefetch)
183                  {
184                      beforeBarrier = false;
185                  }
186  
187                  _commandBufferQueue.Enqueue(commandBuffer);
188              }
189          }
190  
191          /// <summary>
192          /// Waits until commands are pushed to the FIFO.
193          /// </summary>
194          /// <returns>True if commands were received, false if wait timed out</returns>
195          public bool WaitForCommands()
196          {
197              return !_commandBufferQueue.IsEmpty || (_event.WaitOne(8) && !_commandBufferQueue.IsEmpty);
198          }
199  
200          /// <summary>
201          /// Processes commands pushed to the FIFO.
202          /// </summary>
203          public void DispatchCalls()
204          {
205              // Use this opportunity to also dispose any pending channels that were closed.
206              _context.RunDeferredActions();
207  
208              // Process command buffers.
209              while (_ibEnable && !_interrupt && _commandBufferQueue.TryDequeue(out CommandBuffer entry))
210              {
211                  bool flushCommandBuffer = true;
212  
213                  if (_flushSkips != 0)
214                  {
215                      _flushSkips--;
216                      flushCommandBuffer = false;
217                  }
218  
219                  ReadOnlySpan<int> words = entry.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
220  
221                  // If we are changing the current channel,
222                  // we need to force all the host state to be updated.
223                  if (_prevChannelProcessor != entry.Processor)
224                  {
225                      _prevChannelProcessor = entry.Processor;
226                      entry.Processor.ForceAllDirty();
227                  }
228  
229                  entry.Processor.Process(entry.EntryAddress, words);
230              }
231  
232              _interrupt = false;
233          }
234  
235          /// <summary>
236          /// Sets the number of flushes that should be skipped for subsequent command buffers.
237          /// </summary>
238          /// <remarks>
239          /// This can improve performance when command buffer data only needs to be consumed by the GPU.
240          /// </remarks>
241          /// <param name="count">The amount of flushes that should be skipped</param>
242          internal void SetFlushSkips(int count)
243          {
244              _flushSkips = count;
245          }
246  
247          /// <summary>
248          /// Interrupts command processing. This will break out of the DispatchCalls loop.
249          /// </summary>
250          public void Interrupt()
251          {
252              _interrupt = true;
253              _event.Set();
254          }
255  
256          /// <summary>
257          /// Disposes of resources used for GPFifo command processing.
258          /// </summary>
259          public void Dispose() => _event.Dispose();
260      }
261  }