Compare commits

...

6 Commits

Author SHA1 Message Date
gdkchan
952f6f8a65 Calculate vertex buffer size from index buffer type (#3253)
* Calculate vertex buffer size from index buffer type

* We also need to update the size if first vertex changes
2022-04-08 11:02:06 +02:00
Mary
d04ba51bb0 amadeus: Improve and fix delay effect processing (#3205)
* amadeus: Improve and fix delay effect processing

This reworks the delay effect processing by representing the calculations with the appropriate matrices and by unrolling some loops in the code.
This allows better optimization by the JIT while making the code more readable.

Also fix a bug in the Surround code path found while looking back at my notes.

* Remove useless GetHashCode

* Address gdkchan's comments
2022-04-08 10:52:18 +02:00
Narr the Reg
55ee261363 service: hid: Signal event on AcquireNpadStyleSetUpdateEventHandle (#3247) 2022-04-07 15:43:14 -03:00
Alex Barney
4e3a34412e Update to LibHac 0.16.1 (#3263) 2022-04-07 15:18:14 -03:00
Mary
3f4fb8f73a amadeus: Update to REV11 (#3230)
This should implement all ABI changes introduced with REV11 on 14.0.0.

As Nintendo changed the channel disposition of the "legacy" effects (Delay, Reverb and Reverb 3D) to match the standard channel mapping, I took the liberty of just remapping to the old disposition for now.
The proper changes will be handled at a later date with a complete rewrite of those 3 effects to make them more readable (see https://github.com/Ryujinx/Ryujinx/pull/3205 for the first iteration of it).
2022-04-06 09:12:38 +02:00
gdkchan
56c56aa34d Do not clamp SNorm outputs to the [0, 1] range on OpenGL (#3260) 2022-04-05 18:09:06 -03:00
21 changed files with 762 additions and 95 deletions

View File

@@ -18,8 +18,10 @@
 using Ryujinx.Audio.Renderer.Dsp.State;
 using Ryujinx.Audio.Renderer.Parameter.Effect;
 using Ryujinx.Audio.Renderer.Server.Effect;
+using Ryujinx.Audio.Renderer.Utils.Math;
 using System;
 using System.Diagnostics;
+using System.Numerics;
 using System.Runtime.CompilerServices;
 
 namespace Ryujinx.Audio.Renderer.Dsp.Command
@@ -45,7 +47,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
         private const int FixedPointPrecision = 14;
 
-        public DelayCommand(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, ulong workBuffer, int nodeId)
+        public DelayCommand(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
         {
             Enabled = true;
             NodeId = nodeId;
@@ -63,9 +65,14 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
                 InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
                 OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
             }
+
+            // NOTE: We do the opposite as Nintendo here for now to restore previous behaviour
+            // TODO: Update delay processing and remove this to use RemapLegacyChannelEffectMappingToChannelResourceMapping.
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
         private unsafe void ProcessDelayMono(ref DelayState state, float* outputBuffer, float* inputBuffer, uint sampleCount)
         {
             float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
@@ -78,133 +85,148 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
                 float input = inputBuffer[i] * 64;
                 float delayLineValue = state.DelayLines[0].Read();
 
-                float lowPassResult = (input * inGain + delayLineValue * feedbackGain) * state.LowPassBaseGain + state.LowPassZ[0] * state.LowPassFeedbackGain;
+                float temp = input * inGain + delayLineValue * feedbackGain;
 
-                state.LowPassZ[0] = lowPassResult;
-                state.DelayLines[0].Update(lowPassResult);
+                state.UpdateLowPassFilter(ref temp, 1);
 
                 outputBuffer[i] = (input * dryGain + delayLineValue * outGain) / 64;
             }
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
         private unsafe void ProcessDelayStereo(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
         {
             const ushort channelCount = 2;
 
-            Span<float> channelInput = stackalloc float[channelCount];
-            Span<float> delayLineValues = stackalloc float[channelCount];
-            Span<float> temp = stackalloc float[channelCount];
-
             float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
             float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
             float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
             float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
             float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
 
+            Matrix2x2 delayFeedback = new Matrix2x2(delayFeedbackBaseGain , delayFeedbackCrossGain,
+                                                    delayFeedbackCrossGain, delayFeedbackBaseGain);
+
             for (int i = 0; i < sampleCount; i++)
             {
-                for (int j = 0; j < channelCount; j++)
+                Vector2 channelInput = new Vector2
                 {
-                    channelInput[j] = *((float*)inputBuffers[j] + i) * 64;
-                    delayLineValues[j] = state.DelayLines[j].Read();
-                }
+                    X = *((float*)inputBuffers[0] + i) * 64,
+                    Y = *((float*)inputBuffers[1] + i) * 64,
+                };
 
-                temp[0] = channelInput[0] * inGain + delayLineValues[1] * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
-                temp[1] = channelInput[1] * inGain + delayLineValues[0] * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
-
-                for (int j = 0; j < channelCount; j++)
+                Vector2 delayLineValues = new Vector2()
                 {
-                    float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
+                    X = state.DelayLines[0].Read(),
+                    Y = state.DelayLines[1].Read(),
+                };
 
-                    state.LowPassZ[j] = lowPassResult;
-                    state.DelayLines[j].Update(lowPassResult);
+                Vector2 temp = MatrixHelper.Transform(ref channelInput, ref delayFeedback) + channelInput * inGain;
 
-                    *((float*)outputBuffers[j] + i) = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
-                }
+                state.UpdateLowPassFilter(ref Unsafe.As<Vector2, float>(ref temp), channelCount);
+
+                *((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
+                *((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
             }
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
         private unsafe void ProcessDelayQuadraphonic(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
         {
             const ushort channelCount = 4;
 
-            Span<float> channelInput = stackalloc float[channelCount];
-            Span<float> delayLineValues = stackalloc float[channelCount];
-            Span<float> temp = stackalloc float[channelCount];
-
             float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
             float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
             float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
             float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
             float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
 
+            Matrix4x4 delayFeedback = new Matrix4x4(delayFeedbackBaseGain , delayFeedbackCrossGain, delayFeedbackCrossGain, 0.0f,
+                                                    delayFeedbackCrossGain, delayFeedbackBaseGain , 0.0f                  , delayFeedbackCrossGain,
+                                                    delayFeedbackCrossGain, 0.0f                  , delayFeedbackBaseGain , delayFeedbackCrossGain,
+                                                    0.0f                  , delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain);
+
             for (int i = 0; i < sampleCount; i++)
             {
-                for (int j = 0; j < channelCount; j++)
+                Vector4 channelInput = new Vector4
                 {
-                    channelInput[j] = *((float*)inputBuffers[j] + i) * 64;
-                    delayLineValues[j] = state.DelayLines[j].Read();
-                }
+                    X = *((float*)inputBuffers[0] + i) * 64,
+                    Y = *((float*)inputBuffers[1] + i) * 64,
+                    Z = *((float*)inputBuffers[2] + i) * 64,
+                    W = *((float*)inputBuffers[3] + i) * 64
+                };
 
-                temp[0] = channelInput[0] * inGain + (delayLineValues[2] + delayLineValues[1]) * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
-                temp[1] = channelInput[1] * inGain + (delayLineValues[0] + delayLineValues[3]) * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
-                temp[2] = channelInput[2] * inGain + (delayLineValues[3] + delayLineValues[0]) * delayFeedbackCrossGain + delayLineValues[2] * delayFeedbackBaseGain;
-                temp[3] = channelInput[3] * inGain + (delayLineValues[1] + delayLineValues[2]) * delayFeedbackCrossGain + delayLineValues[3] * delayFeedbackBaseGain;
-
-                for (int j = 0; j < channelCount; j++)
+                Vector4 delayLineValues = new Vector4()
                 {
-                    float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
+                    X = state.DelayLines[0].Read(),
+                    Y = state.DelayLines[1].Read(),
+                    Z = state.DelayLines[2].Read(),
+                    W = state.DelayLines[3].Read()
+                };
 
-                    state.LowPassZ[j] = lowPassResult;
-                    state.DelayLines[j].Update(lowPassResult);
+                Vector4 temp = MatrixHelper.Transform(ref channelInput, ref delayFeedback) + channelInput * inGain;
 
-                    *((float*)outputBuffers[j] + i) = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
-                }
+                state.UpdateLowPassFilter(ref Unsafe.As<Vector4, float>(ref temp), channelCount);
+
+                *((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
+                *((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
+                *((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
+                *((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
             }
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
         private unsafe void ProcessDelaySurround(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
         {
             const ushort channelCount = 6;
 
-            Span<float> channelInput = stackalloc float[channelCount];
-            Span<float> delayLineValues = stackalloc float[channelCount];
-            Span<float> temp = stackalloc float[channelCount];
-
+            float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
             float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
             float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
             float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
             float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
             float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
 
+            Matrix6x6 delayFeedback = new Matrix6x6(delayFeedbackBaseGain , 0.0f                  , 0.0f                  , 0.0f                  , delayFeedbackCrossGain, delayFeedbackCrossGain,
+                                                    0.0f                  , delayFeedbackBaseGain , 0.0f                  , delayFeedbackCrossGain, delayFeedbackCrossGain, 0.0f                  ,
+                                                    delayFeedbackCrossGain, 0.0f                  , delayFeedbackBaseGain , delayFeedbackCrossGain, 0.0f                  , 0.0f                  ,
+                                                    0.0f                  , delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain , 0.0f                  , 0.0f                  ,
+                                                    delayFeedbackCrossGain, delayFeedbackCrossGain, 0.0f                  , 0.0f                  , delayFeedbackBaseGain , 0.0f                  ,
+                                                    0.0f                  , 0.0f                  , 0.0f                  , 0.0f                  , 0.0f                  , feedbackGain);
+
             for (int i = 0; i < sampleCount; i++)
             {
-                for (int j = 0; j < channelCount; j++)
+                Vector6 channelInput = new Vector6
                 {
-                    channelInput[j] = *((float*)inputBuffers[j] + i) * 64;
-                    delayLineValues[j] = state.DelayLines[j].Read();
-                }
+                    X = *((float*)inputBuffers[0] + i) * 64,
+                    Y = *((float*)inputBuffers[1] + i) * 64,
+                    Z = *((float*)inputBuffers[2] + i) * 64,
+                    W = *((float*)inputBuffers[3] + i) * 64,
+                    V = *((float*)inputBuffers[4] + i) * 64,
+                    U = *((float*)inputBuffers[5] + i) * 64
+                };
 
-                temp[0] = channelInput[0] * inGain + (delayLineValues[2] + delayLineValues[4]) * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
-                temp[1] = channelInput[1] * inGain + (delayLineValues[4] + delayLineValues[3]) * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
-                temp[2] = channelInput[2] * inGain + (delayLineValues[3] + delayLineValues[0]) * delayFeedbackCrossGain + delayLineValues[2] * delayFeedbackBaseGain;
-                temp[3] = channelInput[3] * inGain + (delayLineValues[1] + delayLineValues[2]) * delayFeedbackCrossGain + delayLineValues[3] * delayFeedbackBaseGain;
-                temp[4] = channelInput[4] * inGain + (delayLineValues[0] + delayLineValues[1]) * delayFeedbackCrossGain + delayLineValues[4] * delayFeedbackBaseGain;
-                temp[5] = channelInput[5] * inGain + delayLineValues[5] * delayFeedbackBaseGain;
-
-                for (int j = 0; j < channelCount; j++)
+                Vector6 delayLineValues = new Vector6
                 {
-                    float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
+                    X = state.DelayLines[0].Read(),
+                    Y = state.DelayLines[1].Read(),
+                    Z = state.DelayLines[2].Read(),
+                    W = state.DelayLines[3].Read(),
+                    V = state.DelayLines[4].Read(),
+                    U = state.DelayLines[5].Read()
+                };
 
-                    state.LowPassZ[j] = lowPassResult;
-                    state.DelayLines[j].Update(lowPassResult);
+                Vector6 temp = MatrixHelper.Transform(ref channelInput, ref delayFeedback) + channelInput * inGain;
 
-                    *((float*)outputBuffers[j] + i) = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
-                }
+                state.UpdateLowPassFilter(ref Unsafe.As<Vector6, float>(ref temp), channelCount);
+
+                *((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
+                *((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
+                *((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
+                *((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
+                *((float*)outputBuffers[4] + i) = (channelInput.V * dryGain + delayLineValues.V * outGain) / 64;
+                *((float*)outputBuffers[5] + i) = (channelInput.U * dryGain + delayLineValues.U * outGain) / 64;
             }
         }
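
The commit above replaces the per-channel feedback loops with matrix-based processing. The standalone C# sketch below is not part of the changeset; the gain and sample values are made up and the 2x2 matrix is written out by hand rather than using the new Matrix2x2 helper. It only illustrates how the cross-feedback terms of the old stereo loop correspond to a matrix-vector product over the delay line values:

    using System;

    class StereoDelayFeedbackSketch
    {
        static void Main()
        {
            float baseGain = 0.6f;   // stand-in for state.DelayFeedbackBaseGain
            float crossGain = 0.2f;  // stand-in for state.DelayFeedbackCrossGain
            float inGain = 0.5f;

            float[] channelInput = { 0.25f, -0.75f };
            float[] delayLineValues = { 0.10f, 0.30f };

            // Old formulation: explicit cross-feedback per channel.
            float temp0 = channelInput[0] * inGain + delayLineValues[1] * crossGain + delayLineValues[0] * baseGain;
            float temp1 = channelInput[1] * inGain + delayLineValues[0] * crossGain + delayLineValues[1] * baseGain;

            // Matrix formulation: the same cross-feedback expressed as the 2x2 matrix
            // [ base  cross ]
            // [ cross base  ]
            // applied to the delay line values, plus the scaled input.
            float matTemp0 = baseGain * delayLineValues[0] + crossGain * delayLineValues[1] + channelInput[0] * inGain;
            float matTemp1 = crossGain * delayLineValues[0] + baseGain * delayLineValues[1] + channelInput[1] * inGain;

            Console.WriteLine($"{temp0} == {matTemp0}");
            Console.WriteLine($"{temp1} == {matTemp1}");
        }
    }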

View File

@@ -63,7 +63,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
         private Reverb3dParameter _parameter;
 
-        public Reverb3dCommand(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, ulong workBuffer, int nodeId)
+        public Reverb3dCommand(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
         {
             Enabled = true;
             IsEffectEnabled = isEnabled;
@@ -80,6 +80,11 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
                 InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
                 OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
             }
+
+            // NOTE: We do the opposite as Nintendo here for now to restore previous behaviour
+            // TODO: Update reverb 3d processing and remove this to use RemapLegacyChannelEffectMappingToChannelResourceMapping.
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -194,7 +199,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
                 if (isSurround)
                 {
-                    *((float*)outputBuffers[4] + sampleIndex) += (outputValues[4] + state.BackLeftDelayLine.Update((values[2] - values[3]) * 0.5f) + channelInput[4] * state.DryGain);
+                    *((float*)outputBuffers[4] + sampleIndex) += (outputValues[4] + state.FrontCenterDelayLine.Update((values[2] - values[3]) * 0.5f) + channelInput[4] * state.DryGain);
                 }
             }
         }

View File

@@ -66,7 +66,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
         private const int FixedPointPrecision = 14;
 
-        public ReverbCommand(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, ulong workBuffer, int nodeId, bool isLongSizePreDelaySupported)
+        public ReverbCommand(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, ulong workBuffer, int nodeId, bool isLongSizePreDelaySupported, bool newEffectChannelMappingSupported)
         {
             Enabled = true;
             IsEffectEnabled = isEnabled;
@@ -85,6 +85,11 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
             }
 
             IsLongSizePreDelaySupported = isLongSizePreDelaySupported;
+
+            // NOTE: We do the opposite as Nintendo here for now to restore previous behaviour
+            // TODO: Update reverb processing and remove this to use RemapLegacyChannelEffectMappingToChannelResourceMapping.
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
+            DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -214,7 +219,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.Command
                 if (isSurround)
                 {
-                    outputValues[4] += state.BackLeftDelayLine.Update((feedbackOutputValues[2] - feedbackOutputValues[3]) * 0.5f);
+                    outputValues[4] += state.FrontCenterDelayLine.Update((feedbackOutputValues[2] - feedbackOutputValues[3]) * 0.5f);
                 }
 
                 for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)

View File

@@ -445,5 +445,39 @@ namespace Ryujinx.Audio.Renderer.Dsp
                 ToIntSlow(output, input, sampleCount);
             }
         }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static void RemapLegacyChannelEffectMappingToChannelResourceMapping(bool isSupported, Span<ushort> bufferIndices)
+        {
+            if (!isSupported && bufferIndices.Length == 6)
+            {
+                ushort backLeft = bufferIndices[2];
+                ushort backRight = bufferIndices[3];
+                ushort frontCenter = bufferIndices[4];
+                ushort lowFrequency = bufferIndices[5];
+
+                bufferIndices[2] = frontCenter;
+                bufferIndices[3] = lowFrequency;
+                bufferIndices[4] = backLeft;
+                bufferIndices[5] = backRight;
+            }
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static void RemapChannelResourceMappingToLegacy(bool isSupported, Span<ushort> bufferIndices)
+        {
+            if (isSupported && bufferIndices.Length == 6)
+            {
+                ushort frontCenter = bufferIndices[2];
+                ushort lowFrequency = bufferIndices[3];
+                ushort backLeft = bufferIndices[4];
+                ushort backRight = bufferIndices[5];
+
+                bufferIndices[2] = backLeft;
+                bufferIndices[3] = backRight;
+                bufferIndices[4] = frontCenter;
+                bufferIndices[5] = lowFrequency;
+            }
+        }
     }
 }
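
For reference, the two remap helpers added above only swap the last four 5.1 buffer indices between the standard order implied by the code (FL, FR, FC, LFE, BL, BR) and the legacy order the old effect code expects (FL, FR, BL, BR, FC, LFE). Below is a minimal standalone sketch of the "to legacy" direction with made-up index values; it is plain C#, not Ryujinx code:

    using System;

    class RemapSketch
    {
        static void Main()
        {
            // Indices laid out in the new/standard order: FL, FR, FC, LFE, BL, BR.
            ushort[] indices = { 0, 1, 2, 3, 4, 5 };

            bool newMappingSupported = true;

            if (newMappingSupported && indices.Length == 6)
            {
                ushort frontCenter = indices[2];
                ushort lowFrequency = indices[3];
                ushort backLeft = indices[4];
                ushort backRight = indices[5];

                // Rearranged into the legacy order: FL, FR, BL, BR, FC, LFE.
                indices[2] = backLeft;
                indices[3] = backRight;
                indices[4] = frontCenter;
                indices[5] = lowFrequency;
            }

            Console.WriteLine(string.Join(", ", indices)); // 0, 1, 4, 5, 2, 3
        }
    }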

View File

@@ -17,6 +17,7 @@
 using Ryujinx.Audio.Renderer.Dsp.Effect;
 using Ryujinx.Audio.Renderer.Parameter.Effect;
+using System.Runtime.CompilerServices;
 
 namespace Ryujinx.Audio.Renderer.Dsp.State
 {
@@ -43,7 +44,6 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
             {
                 DelayLines[i] = new DelayLine(sampleRate, parameter.DelayTimeMax);
                 DelayLines[i].SetDelay(parameter.DelayTime);
-
-                LowPassZ[0] = 0;
             }
 
             UpdateParameter(ref parameter);
@@ -69,5 +69,16 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
             LowPassFeedbackGain = 0.95f * FixedPointHelper.ToFloat(parameter.LowPassAmount, FixedPointPrecision);
             LowPassBaseGain = 1.0f - LowPassFeedbackGain;
         }
+
+        public void UpdateLowPassFilter(ref float tempRawRef, uint channelCount)
+        {
+            for (int i = 0; i < channelCount; i++)
+            {
+                float lowPassResult = LowPassFeedbackGain * LowPassZ[i] + Unsafe.Add(ref tempRawRef, i) * LowPassBaseGain;
+
+                LowPassZ[i] = lowPassResult;
+                DelayLines[i].Update(lowPassResult);
+            }
+        }
     }
 }
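
For reference, the UpdateLowPassFilter helper added above is a per-channel one-pole low-pass: with g = LowPassFeedbackGain = 0.95 x LowPassAmount (converted from fixed point) and LowPassBaseGain = 1 - g, each channel i computes

    z_i[n] = (1 - g) * temp_i[n] + g * z_i[n - 1]

where z_i is LowPassZ[i]; the result is written back to LowPassZ[i] and fed into that channel's delay line.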

View File

@@ -34,7 +34,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
         public DecayDelay[] DecayDelays1 { get; }
         public DecayDelay[] DecayDelays2 { get; }
         public IDelayLine PreDelayLine { get; }
-        public IDelayLine BackLeftDelayLine { get; }
+        public IDelayLine FrontCenterDelayLine { get; }
         public float DryGain { get; private set; }
         public uint[] EarlyDelayTime { get; private set; }
         public float PreviousPreDelayValue { get; set; }
@@ -69,7 +69,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
             }
 
             PreDelayLine = new DelayLine3d(sampleRate, 400);
-            BackLeftDelayLine = new DelayLine3d(sampleRate, 5);
+            FrontCenterDelayLine = new DelayLine3d(sampleRate, 5);
 
             UpdateParameter(ref parameter);
         }

View File

@@ -97,7 +97,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
         public DelayLine[] FdnDelayLines { get; }
        public DecayDelay[] DecayDelays { get; }
         public DelayLine PreDelayLine { get; }
-        public DelayLine BackLeftDelayLine { get; }
+        public DelayLine FrontCenterDelayLine { get; }
         public uint[] EarlyDelayTime { get; }
         public float[] EarlyGain { get; }
         public uint PreDelayLineDelayTime { get; private set; }
@@ -149,7 +149,7 @@ namespace Ryujinx.Audio.Renderer.Dsp.State
             }
 
             PreDelayLine = new DelayLine(sampleRate, preDelayTimeMax);
-            BackLeftDelayLine = new DelayLine(sampleRate, 5.0f);
+            FrontCenterDelayLine = new DelayLine(sampleRate, 5.0f);
 
             UpdateParameter(ref parameter);
         }

View File

@@ -363,6 +363,9 @@ namespace Ryujinx.Audio.Renderer.Server
                 case 4:
                     _commandProcessingTimeEstimator = new CommandProcessingTimeEstimatorVersion4(_sampleCount, _mixBufferCount);
                     break;
+                case 5:
+                    _commandProcessingTimeEstimator = new CommandProcessingTimeEstimatorVersion5(_sampleCount, _mixBufferCount);
+                    break;
                 default:
                     throw new NotImplementedException($"Unsupported processing time estimator version {_behaviourContext.GetCommandProcessingTimeEstimatorVersion()}.");
             }

View File

@@ -107,10 +107,18 @@
         /// <remarks>This was added in system update 13.0.0</remarks>
         public const int Revision10 = 10 << 24;
 
+        /// <summary>
+        /// REV11:
+        /// The "legacy" effects (Delay, Reverb and Reverb 3D) were updated to match the standard channel mapping used by the audio renderer.
+        /// A new version of the command estimator was added to address timing changes caused by the legacy effects changes.
+        /// </summary>
+        /// <remarks>This was added in system update 14.0.0</remarks>
+        public const int Revision11 = 11 << 24;
+
         /// <summary>
         /// Last revision supported by the implementation.
         /// </summary>
-        public const int LastRevision = Revision10;
+        public const int LastRevision = Revision11;
 
         /// <summary>
         /// Target revision magic supported by the implementation.
@@ -366,12 +374,26 @@ namespace Ryujinx.Audio.Renderer.Server
             return CheckFeatureSupported(UserRevision, BaseRevisionMagic + Revision10);
         }
 
+        /// <summary>
+        /// Check if the audio renderer should support new channel resource mapping for 5.1 on Delay, Reverb and Reverb 3D effects.
+        /// </summary>
+        /// <returns>True if the audio renderer support new channel resource mapping for 5.1.</returns>
+        public bool IsNewEffectChannelMappingSupported()
+        {
+            return CheckFeatureSupported(UserRevision, BaseRevisionMagic + Revision11);
+        }
+
         /// <summary>
         /// Get the version of the <see cref="ICommandProcessingTimeEstimator"/>.
         /// </summary>
         /// <returns>The version of the <see cref="ICommandProcessingTimeEstimator"/>.</returns>
         public int GetCommandProcessingTimeEstimatorVersion()
         {
+            if (CheckFeatureSupported(UserRevision, BaseRevisionMagic + Revision11))
+            {
+                return 5;
+            }
+
             if (CheckFeatureSupported(UserRevision, BaseRevisionMagic + Revision10))
             {
                 return 4;
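
To make the new revision gate above concrete, here is a minimal standalone sketch of how the estimator version is selected. The IsSupported helper is a stand-in that only assumes "user revision at or above the feature revision"; it is not the real CheckFeatureSupported implementation:

    using System;

    class RevisionSketch
    {
        const int Revision10 = 10 << 24;
        const int Revision11 = 11 << 24;

        // Stand-in for BehaviourContext.CheckFeatureSupported (assumption, not the real code).
        static bool IsSupported(int userRevision, int featureRevision) => userRevision >= featureRevision;

        static int GetEstimatorVersion(int userRevision)
        {
            if (IsSupported(userRevision, Revision11))
            {
                return 5;
            }

            if (IsSupported(userRevision, Revision10))
            {
                return 4;
            }

            return 3; // earlier fallbacks elided in this sketch
        }

        static void Main()
        {
            Console.WriteLine(GetEstimatorVersion(Revision10)); // 4
            Console.WriteLine(GetEstimatorVersion(Revision11)); // 5
        }
    }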

View File

@@ -336,11 +336,12 @@ namespace Ryujinx.Audio.Renderer.Server
         /// <param name="workBuffer">The work buffer to use for processing.</param>
         /// <param name="nodeId">The node id associated to this command.</param>
         /// <param name="isLongSizePreDelaySupported">If set to true, the long size pre-delay is supported.</param>
-        public void GenerateReverbEffect(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, CpuAddress workBuffer, int nodeId, bool isLongSizePreDelaySupported)
+        /// <param name="newEffectChannelMappingSupported">If set to true, the new effect channel mapping for 5.1 is supported.</param>
+        public void GenerateReverbEffect(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, CpuAddress workBuffer, int nodeId, bool isLongSizePreDelaySupported, bool newEffectChannelMappingSupported)
         {
             if (parameter.IsChannelCountValid())
             {
-                ReverbCommand command = new ReverbCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId, isLongSizePreDelaySupported);
+                ReverbCommand command = new ReverbCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId, isLongSizePreDelaySupported, newEffectChannelMappingSupported);
 
                 command.EstimatedProcessingTime = _commandProcessingTimeEstimator.Estimate(command);
@@ -357,11 +358,12 @@ namespace Ryujinx.Audio.Renderer.Server
         /// <param name="isEnabled">Set to true if the effect should be active.</param>
         /// <param name="workBuffer">The work buffer to use for processing.</param>
         /// <param name="nodeId">The node id associated to this command.</param>
-        public void GenerateReverb3dEffect(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, CpuAddress workBuffer, int nodeId)
+        /// <param name="newEffectChannelMappingSupported">If set to true, the new effect channel mapping for 5.1 is supported.</param>
+        public void GenerateReverb3dEffect(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, CpuAddress workBuffer, int nodeId, bool newEffectChannelMappingSupported)
         {
             if (parameter.IsChannelCountValid())
             {
-                Reverb3dCommand command = new Reverb3dCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId);
+                Reverb3dCommand command = new Reverb3dCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId, newEffectChannelMappingSupported);
 
                 command.EstimatedProcessingTime = _commandProcessingTimeEstimator.Estimate(command);
@@ -379,11 +381,12 @@ namespace Ryujinx.Audio.Renderer.Server
         /// <param name="isEnabled">Set to true if the effect should be active.</param>
         /// <param name="workBuffer">The work buffer to use for processing.</param>
         /// <param name="nodeId">The node id associated to this command.</param>
-        public void GenerateDelayEffect(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, CpuAddress workBuffer, int nodeId)
+        /// <param name="newEffectChannelMappingSupported">If set to true, the new effect channel mapping for 5.1 is supported.</param>
+        public void GenerateDelayEffect(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, CpuAddress workBuffer, int nodeId, bool newEffectChannelMappingSupported)
         {
             if (parameter.IsChannelCountValid())
             {
-                DelayCommand command = new DelayCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId);
+                DelayCommand command = new DelayCommand(bufferOffset, parameter, state, isEnabled, workBuffer, nodeId, newEffectChannelMappingSupported);
 
                 command.EstimatedProcessingTime = _commandProcessingTimeEstimator.Estimate(command);

View File

@@ -483,31 +483,31 @@ namespace Ryujinx.Audio.Renderer.Server
             }
         }
 
-        private void GenerateDelayEffect(uint bufferOffset, DelayEffect effect, int nodeId)
+        private void GenerateDelayEffect(uint bufferOffset, DelayEffect effect, int nodeId, bool newEffectChannelMappingSupported)
         {
             Debug.Assert(effect.Type == EffectType.Delay);
 
             ulong workBuffer = effect.GetWorkBuffer(-1);
 
-            _commandBuffer.GenerateDelayEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId);
+            _commandBuffer.GenerateDelayEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId, newEffectChannelMappingSupported);
         }
 
-        private void GenerateReverbEffect(uint bufferOffset, ReverbEffect effect, int nodeId, bool isLongSizePreDelaySupported)
+        private void GenerateReverbEffect(uint bufferOffset, ReverbEffect effect, int nodeId, bool isLongSizePreDelaySupported, bool newEffectChannelMappingSupported)
         {
             Debug.Assert(effect.Type == EffectType.Reverb);
 
             ulong workBuffer = effect.GetWorkBuffer(-1);
 
-            _commandBuffer.GenerateReverbEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId, isLongSizePreDelaySupported);
+            _commandBuffer.GenerateReverbEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId, isLongSizePreDelaySupported, newEffectChannelMappingSupported);
         }
 
-        private void GenerateReverb3dEffect(uint bufferOffset, Reverb3dEffect effect, int nodeId)
+        private void GenerateReverb3dEffect(uint bufferOffset, Reverb3dEffect effect, int nodeId, bool newEffectChannelMappingSupported)
         {
             Debug.Assert(effect.Type == EffectType.Reverb3d);
 
             ulong workBuffer = effect.GetWorkBuffer(-1);
 
-            _commandBuffer.GenerateReverb3dEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId);
+            _commandBuffer.GenerateReverb3dEffect(bufferOffset, effect.Parameter, effect.State, effect.IsEnabled, workBuffer, nodeId, newEffectChannelMappingSupported);
         }
 
         private void GenerateBiquadFilterEffect(uint bufferOffset, BiquadFilterEffect effect, int nodeId)
@@ -650,13 +650,13 @@ namespace Ryujinx.Audio.Renderer.Server
                     GenerateAuxEffect(mix.BufferOffset, (AuxiliaryBufferEffect)effect, nodeId);
                     break;
                 case EffectType.Delay:
-                    GenerateDelayEffect(mix.BufferOffset, (DelayEffect)effect, nodeId);
+                    GenerateDelayEffect(mix.BufferOffset, (DelayEffect)effect, nodeId, _rendererContext.BehaviourContext.IsNewEffectChannelMappingSupported());
                     break;
                 case EffectType.Reverb:
-                    GenerateReverbEffect(mix.BufferOffset, (ReverbEffect)effect, nodeId, mix.IsLongSizePreDelaySupported);
+                    GenerateReverbEffect(mix.BufferOffset, (ReverbEffect)effect, nodeId, mix.IsLongSizePreDelaySupported, _rendererContext.BehaviourContext.IsNewEffectChannelMappingSupported());
                     break;
                 case EffectType.Reverb3d:
-                    GenerateReverb3dEffect(mix.BufferOffset, (Reverb3dEffect)effect, nodeId);
+                    GenerateReverb3dEffect(mix.BufferOffset, (Reverb3dEffect)effect, nodeId, _rendererContext.BehaviourContext.IsNewEffectChannelMappingSupported());
                     break;
                 case EffectType.BiquadFilter:
                     GenerateBiquadFilterEffect(mix.BufferOffset, (BiquadFilterEffect)effect, nodeId);

View File

@@ -198,7 +198,7 @@ namespace Ryujinx.Audio.Renderer.Server
             return (uint)1853.2f;
         }
 
-        public uint Estimate(DelayCommand command)
+        public virtual uint Estimate(DelayCommand command)
         {
             Debug.Assert(_sampleCount == 160 || _sampleCount == 240);
@@ -272,7 +272,7 @@ namespace Ryujinx.Audio.Renderer.Server
             }
         }
 
-        public uint Estimate(ReverbCommand command)
+        public virtual uint Estimate(ReverbCommand command)
        {
             Debug.Assert(_sampleCount == 160 || _sampleCount == 240);
@@ -346,7 +346,7 @@ namespace Ryujinx.Audio.Renderer.Server
             }
         }
 
-        public uint Estimate(Reverb3dCommand command)
+        public virtual uint Estimate(Reverb3dCommand command)
         {
             Debug.Assert(_sampleCount == 160 || _sampleCount == 240);

View File

@@ -0,0 +1,253 @@
//
// Copyright (c) 2019-2022 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.Command;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Server
{
/// <summary>
/// <see cref="ICommandProcessingTimeEstimator"/> version 5. (added with REV11)
/// </summary>
public class CommandProcessingTimeEstimatorVersion5 : CommandProcessingTimeEstimatorVersion4
{
public CommandProcessingTimeEstimatorVersion5(uint sampleCount, uint bufferCount) : base(sampleCount, bufferCount) { }
public override uint Estimate(DelayCommand command)
{
Debug.Assert(_sampleCount == 160 || _sampleCount == 240);
if (_sampleCount == 160)
{
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 8929;
case 2:
return 25501;
case 4:
return 47760;
case 6:
return 82203;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return (uint)1295.20f;
case 2:
return (uint)1213.60f;
case 4:
return (uint)942.03f;
case 6:
return (uint)1001.6f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 11941;
case 2:
return 37197;
case 4:
return 69750;
case 6:
return 12004;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return (uint)997.67f;
case 2:
return (uint)977.63f;
case 4:
return (uint)792.31f;
case 6:
return (uint)875.43f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
public override uint Estimate(ReverbCommand command)
{
Debug.Assert(_sampleCount == 160 || _sampleCount == 240);
if (_sampleCount == 160)
{
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 81475;
case 2:
return 84975;
case 4:
return 91625;
case 6:
return 95332;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return (uint)536.30f;
case 2:
return (uint)588.80f;
case 4:
return (uint)643.70f;
case 6:
return (uint)706.0f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 120170;
case 2:
return 125260;
case 4:
return 135750;
case 6:
return 141130;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return (uint)617.64f;
case 2:
return (uint)659.54f;
case 4:
return (uint)711.44f;
case 6:
return (uint)778.07f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
public override uint Estimate(Reverb3dCommand command)
{
Debug.Assert(_sampleCount == 160 || _sampleCount == 240);
if (_sampleCount == 160)
{
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 116750;
case 2:
return 125910;
case 4:
return 146340;
case 6:
return 165810;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 735;
case 2:
return (uint)766.62f;
case 4:
return (uint)834.07f;
case 6:
return (uint)875.44f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
if (command.Enabled)
{
switch (command.Parameter.ChannelCount)
{
case 1:
return 170290;
case 2:
return 183880;
case 4:
return 214700;
case 6:
return 243850;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
else
{
switch (command.Parameter.ChannelCount)
{
case 1:
return (uint)508.47f;
case 2:
return (uint)582.45f;
case 4:
return (uint)626.42f;
case 6:
return (uint)682.47f;
default:
throw new NotImplementedException($"{command.Parameter.ChannelCount}");
}
}
}
}
}

View File

@@ -0,0 +1,71 @@
namespace Ryujinx.Audio.Renderer.Utils.Math
{
record struct Matrix2x2
{
public float M11;
public float M12;
public float M21;
public float M22;
public Matrix2x2(float m11, float m12,
float m21, float m22)
{
M11 = m11;
M12 = m12;
M21 = m21;
M22 = m22;
}
public static Matrix2x2 operator +(Matrix2x2 value1, Matrix2x2 value2)
{
Matrix2x2 m;
m.M11 = value1.M11 + value2.M11;
m.M12 = value1.M12 + value2.M12;
m.M21 = value1.M21 + value2.M21;
m.M22 = value1.M22 + value2.M22;
return m;
}
public static Matrix2x2 operator -(Matrix2x2 value1, float value2)
{
Matrix2x2 m;
m.M11 = value1.M11 - value2;
m.M12 = value1.M12 - value2;
m.M21 = value1.M21 - value2;
m.M22 = value1.M22 - value2;
return m;
}
public static Matrix2x2 operator *(Matrix2x2 value1, float value2)
{
Matrix2x2 m;
m.M11 = value1.M11 * value2;
m.M12 = value1.M12 * value2;
m.M21 = value1.M21 * value2;
m.M22 = value1.M22 * value2;
return m;
}
public static Matrix2x2 operator *(Matrix2x2 value1, Matrix2x2 value2)
{
Matrix2x2 m;
// First row
m.M11 = value1.M11 * value2.M11 + value1.M12 * value2.M21;
m.M12 = value1.M11 * value2.M12 + value1.M12 * value2.M22;
// Second row
m.M21 = value1.M21 * value2.M11 + value1.M22 * value2.M21;
m.M22 = value1.M21 * value2.M12 + value1.M22 * value2.M22;
return m;
}
}
}

View File

@@ -0,0 +1,97 @@
namespace Ryujinx.Audio.Renderer.Utils.Math
{
record struct Matrix6x6
{
public float M11;
public float M12;
public float M13;
public float M14;
public float M15;
public float M16;
public float M21;
public float M22;
public float M23;
public float M24;
public float M25;
public float M26;
public float M31;
public float M32;
public float M33;
public float M34;
public float M35;
public float M36;
public float M41;
public float M42;
public float M43;
public float M44;
public float M45;
public float M46;
public float M51;
public float M52;
public float M53;
public float M54;
public float M55;
public float M56;
public float M61;
public float M62;
public float M63;
public float M64;
public float M65;
public float M66;
public Matrix6x6(float m11, float m12, float m13, float m14, float m15, float m16,
float m21, float m22, float m23, float m24, float m25, float m26,
float m31, float m32, float m33, float m34, float m35, float m36,
float m41, float m42, float m43, float m44, float m45, float m46,
float m51, float m52, float m53, float m54, float m55, float m56,
float m61, float m62, float m63, float m64, float m65, float m66)
{
M11 = m11;
M12 = m12;
M13 = m13;
M14 = m14;
M15 = m15;
M16 = m16;
M21 = m21;
M22 = m22;
M23 = m23;
M24 = m24;
M25 = m25;
M26 = m26;
M31 = m31;
M32 = m32;
M33 = m33;
M34 = m34;
M35 = m35;
M36 = m36;
M41 = m41;
M42 = m42;
M43 = m43;
M44 = m44;
M45 = m45;
M46 = m46;
M51 = m51;
M52 = m52;
M53 = m53;
M54 = m54;
M55 = m55;
M56 = m56;
M61 = m61;
M62 = m62;
M63 = m63;
M64 = m64;
M65 = m65;
M66 = m66;
}
}
}

View File

@@ -0,0 +1,45 @@
using Ryujinx.Audio.Renderer.Utils.Math;
using System.Numerics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
static class MatrixHelper
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector6 Transform(ref Vector6 value1, ref Matrix6x6 value2)
{
return new Vector6
{
X = value2.M11 * value1.X + value2.M12 * value1.Y + value2.M13 * value1.Z + value2.M14 * value1.W + value2.M15 * value1.V + value2.M16 * value1.U,
Y = value2.M21 * value1.X + value2.M22 * value1.Y + value2.M23 * value1.Z + value2.M24 * value1.W + value2.M25 * value1.V + value2.M26 * value1.U,
Z = value2.M31 * value1.X + value2.M32 * value1.Y + value2.M33 * value1.Z + value2.M34 * value1.W + value2.M35 * value1.V + value2.M36 * value1.U,
W = value2.M41 * value1.X + value2.M42 * value1.Y + value2.M43 * value1.Z + value2.M44 * value1.W + value2.M45 * value1.V + value2.M46 * value1.U,
V = value2.M51 * value1.X + value2.M52 * value1.Y + value2.M53 * value1.Z + value2.M54 * value1.W + value2.M55 * value1.V + value2.M56 * value1.U,
U = value2.M61 * value1.X + value2.M62 * value1.Y + value2.M63 * value1.Z + value2.M64 * value1.W + value2.M65 * value1.V + value2.M66 * value1.U,
};
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector4 Transform(ref Vector4 value1, ref Matrix4x4 value2)
{
return new Vector4
{
X = value2.M11 * value1.X + value2.M12 * value1.Y + value2.M13 * value1.Z + value2.M14 * value1.W,
Y = value2.M21 * value1.X + value2.M22 * value1.Y + value2.M23 * value1.Z + value2.M24 * value1.W,
Z = value2.M31 * value1.X + value2.M32 * value1.Y + value2.M33 * value1.Z + value2.M34 * value1.W,
W = value2.M41 * value1.X + value2.M42 * value1.Y + value2.M43 * value1.Z + value2.M44 * value1.W
};
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector2 Transform(ref Vector2 value1, ref Matrix2x2 value2)
{
return new Vector2
{
X = value2.M11 * value1.X + value2.M12 * value1.Y,
Y = value2.M21 * value1.X + value2.M22 * value1.Y,
};
}
}
}

View File

@@ -0,0 +1,56 @@
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Utils.Math
{
record struct Vector6
{
public float X;
public float Y;
public float Z;
public float W;
public float V;
public float U;
public Vector6(float value) : this(value, value, value, value, value, value)
{
}
public Vector6(float x, float y, float z, float w, float v, float u)
{
X = x;
Y = y;
Z = z;
W = w;
V = v;
U = u;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector6 operator +(Vector6 left, Vector6 right)
{
return new Vector6(left.X + right.X,
left.Y + right.Y,
left.Z + right.Z,
left.W + right.W,
left.V + right.V,
left.U + right.U);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector6 operator *(Vector6 left, Vector6 right)
{
return new Vector6(left.X * right.X,
left.Y * right.Y,
left.Z * right.Z,
left.W * right.W,
left.V * right.V,
left.U * right.U);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector6 operator *(Vector6 left, float right)
{
return left * new Vector6(right);
}
}
}

View File

@@ -35,6 +35,8 @@ namespace Ryujinx.Graphics.Gpu.Engine.Threed
         private byte _vsClipDistancesWritten;
 
         private bool _prevDrawIndexed;
+        private IndexType _prevIndexType;
+        private uint _prevFirstVertex;
         private bool _prevTfEnable;
 
         /// <summary>
@@ -214,6 +216,17 @@ namespace Ryujinx.Graphics.Gpu.Engine.Threed
                 _prevDrawIndexed = _drawState.DrawIndexed;
             }
 
+            // In some cases, the index type is also used to guess the
+            // vertex buffer size, so we must update it if the type changed too.
+            if (_drawState.DrawIndexed &&
+                (_prevIndexType != _state.State.IndexBufferState.Type ||
+                 _prevFirstVertex != _state.State.FirstVertex))
+            {
+                _updateTracker.ForceDirty(VertexBufferStateIndex);
+
+                _prevIndexType = _state.State.IndexBufferState.Type;
+                _prevFirstVertex = _state.State.FirstVertex;
+            }
+
             bool tfEnable = _state.State.TfEnable;
 
             if (!tfEnable && _prevTfEnable)
@@ -867,6 +880,9 @@ namespace Ryujinx.Graphics.Gpu.Engine.Threed
         /// </summary>
         private void UpdateVertexBufferState()
         {
+            IndexType indexType = _state.State.IndexBufferState.Type;
+            bool indexTypeSmall = indexType == IndexType.UByte || indexType == IndexType.UShort;
+
             _drawState.IsAnyVbInstanced = false;
 
             for (int index = 0; index < Constants.TotalVertexBuffers; index++)
@@ -898,12 +914,27 @@ namespace Ryujinx.Graphics.Gpu.Engine.Threed
                 {
                     // This size may be (much) larger than the real vertex buffer size.
                     // Avoid calculating it this way, unless we don't have any other option.
                     size = endAddress.Pack() - address + 1;
+
+                    if (stride > 0 && indexTypeSmall)
+                    {
+                        // If the index type is a small integer type, then we might be still able
+                        // to reduce the vertex buffer size based on the maximum possible index value.
+                        ulong maxVertexBufferSize = indexType == IndexType.UByte ? 0x100UL : 0x10000UL;
+
+                        maxVertexBufferSize += _state.State.FirstVertex;
+                        maxVertexBufferSize *= (uint)stride;
+
+                        size = Math.Min(size, maxVertexBufferSize);
+                    }
                 }
                 else
                 {
                     // For non-indexed draws, we can guess the size from the vertex count
                     // and stride.
                     int firstInstance = (int)_state.State.FirstInstance;
 
                     var drawState = _state.State.VertexBufferDrawState;
View File

@@ -155,6 +155,12 @@ namespace Ryujinx.Graphics.OpenGL
             _pipeline.Initialize(this);
             _counters.Initialize();
+
+            // This is required to disable [0, 1] clamping for SNorm outputs on compatibility profiles.
+            // This call is expected to fail if we're running with a core profile,
+            // as this clamp target was deprecated, but that's fine as a core profile
+            // should already have the desired behaviour where outputs are not clamped.
+            GL.ClampColor(ClampColorTarget.ClampFragmentColor, ClampColorMode.False);
         }
 
         private void PrintGpuInformation()
private void PrintGpuInformation() private void PrintGpuInformation()

View File

@@ -767,6 +767,9 @@ namespace Ryujinx.HLE.HOS.Services.Hid
                 throw new InvalidOperationException("Out of handles!");
             }
 
+            // Games expect this event to be signaled after calling this function
+            evnt.ReadableEvent.Signal();
+
             context.Response.HandleDesc = IpcHandleDesc.MakeCopy(handle);
 
             Logger.Stub?.PrintStub(LogClass.ServiceHid, new { appletResourceUserId, npadId, npadStyleSet });

View File

@@ -19,7 +19,7 @@
   <ItemGroup>
     <PackageReference Include="Concentus" Version="1.1.7" />
-    <PackageReference Include="LibHac" Version="0.16.0" />
+    <PackageReference Include="LibHac" Version="0.16.1" />
     <PackageReference Include="MsgPack.Cli" Version="1.0.1" />
     <PackageReference Include="SixLabors.ImageSharp" Version="1.0.4" />
     <PackageReference Include="SixLabors.ImageSharp.Drawing" Version="1.0.0-beta11" />