Hello,
I have a problem with DSP_READ_CALLBACK. I get 6 in/out channels from the mic, but four of them are all zeros and the other two are exact copies of each other. What does this duplicated data mean? To get the full signal as a single mono channel, do I need to sum all 6 channels? I want to feed the data into a voice recognizer, so I only need one channel.
Image from the inspector with the raw data: the first channel is the mic data, the second is a copy of the first, and the other four are zeros.
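For reference, this is roughly what I want to end up with: one mono channel taken out of the interleaved buffer (a minimal sketch, not my actual code; it assumes the buffer is interleaved and that channel 0 is the real mic signal):

private static float[] ExtractChannel(float[] interleaved, int frameCount, int channelCount, int channelIndex)
{
    // Interleaved layout assumed: [f0c0, f0c1, ..., f0cN-1, f1c0, f1c1, ...]
    var mono = new float[frameCount];
    for (int frame = 0; frame < frameCount; frame++)
    {
        mono[frame] = interleaved[frame * channelCount + channelIndex];
    }
    return mono;
}

Here is my full recording code: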

public void EnableRecording()
{
    if (_isRecording == false)
    {
        RESULT res = RuntimeManager.CoreSystem.getRecordNumDrivers(out _, out int numOfDriversConnected);
        if (res != RESULT.OK || numOfDriversConnected == 0)
        {
            UnityEngine.Debug.LogWarning("FMOD: No connected recording devices found");
            return;
        }
        RuntimeManager.CoreSystem.getRecordDriverInfo(RecordDeviceId, out _, 0,
            out Guid micGUID, out _sampleRate, out SPEAKERMODE speakerMode, out int captureNumChannels, out DRIVER_STATE driverState);
        _driftThreshold = (uint)(_sampleRate * DRIFT_MS) / 1000;   // The point where we start compensating for drift
        _desiredLatency = (uint)(_sampleRate * LATENSY_MS) / 1000; // User specified latency
        _adjustedLatency = _desiredLatency;                        // User specified latency adjusted for driver update granularity
        _actualLatency = _desiredLatency;                          // Latency measured once playback begins (smoothed for jitter)
        // create sound where capture is recorded
        _exinfo.cbsize = Marshal.SizeOf(typeof(CREATESOUNDEXINFO));
        _exinfo.numchannels = captureNumChannels;
        _exinfo.format = SOUND_FORMAT.PCM16;
        _exinfo.defaultfrequency = _sampleRate;
        _exinfo.length = (uint)_sampleRate * sizeof(short) * (uint)captureNumChannels;
        UnityEngine.Debug.Log($"speakerMode: {speakerMode}");
        UnityEngine.Debug.Log($"_exinfo.cbsize: {_exinfo.cbsize}");
        UnityEngine.Debug.Log($"_exinfo.numchannels: {_exinfo.numchannels}");
        UnityEngine.Debug.Log($"_exinfo.format: {_exinfo.format}");
        UnityEngine.Debug.Log($"_exinfo.defaultfrequency: {_exinfo.defaultfrequency}");
        UnityEngine.Debug.Log($"_exinfo.length: {_exinfo.length}");
        UnityEngine.Debug.Log($"_captureRate: {_sampleRate}");
        RuntimeManager.CoreSystem.createSound(
            _exinfo.userdata, 
            MODE.LOOP_NORMAL | MODE.OPENUSER, 
            ref _exinfo, out _sound);
        RuntimeManager.CoreSystem.recordStart(RecordDeviceId, _sound, true);
        _sound.getLength(out soundLength, TIMEUNIT.PCM);
        UnityEngine.Debug.Log($"soundLength: {soundLength}");
        // play sound on dedicated channel in master channel group
        if (RuntimeManager.CoreSystem.getMasterChannelGroup(out _masterCG) != RESULT.OK)
            UnityEngine.Debug.LogWarning("FMOD: Unable to get the master channel group: _masterCG");
        RuntimeManager.CoreSystem.playSound(_sound, _masterCG, true, out _channel);
        _channel.setPaused(true);
        // Assign the callback to a member variable to avoid garbage collection
        _readCallback = CaptureDSPReadCallback;
        // Allocate data buffers sized for one DSP block; the interleaved capture buffer allows up to 8 channels
        RuntimeManager.CoreSystem.getDSPBufferSize(out uint bufferLength, out int numBuffers);
        _8channelsSampleBuffer = new float[bufferLength * 8];
        _sampleFloatBuffer = new float[bufferLength * captureNumChannels];
        _sampleInt16Buffer = new short[bufferLength * captureNumChannels];
        // Get a handle to this object to pass into the callback
        _objHandle = GCHandle.Alloc(this);
        if (_objHandle.IsAllocated)
        {
            // Define a basic DSP that receives a callback each mix to capture audio
            DSP_DESCRIPTION desc = new DSP_DESCRIPTION();
            desc.numinputbuffers = 1;
            desc.numoutputbuffers = 1;
            desc.read = _readCallback;
            desc.userdata = GCHandle.ToIntPtr(_objHandle);
            // Create an instance of the capture DSP and attach it to the master channel group to capture all audio            
            if (RuntimeManager.CoreSystem.createDSP(ref desc, out _captureDSP) == RESULT.OK)
            {
                if (_masterCG.addDSP(0, _captureDSP) != RESULT.OK)
                {
                    UnityEngine.Debug.LogWarning("FMOD: Unable to add _captureDSP to the master channel group");
                }
            }
            else
            {
                UnityEngine.Debug.LogWarning("FMOD: Unable to create a DSP: _captureDSP");
            }
        }
        else
        {
            UnityEngine.Debug.LogWarning("FMOD: Unable to create a GCHandle: _objHandle");
        }
        _isRecording = true;
    }
}
public void DisableRecording()
{
    if (_isRecording)
    {
        if (_objHandle.IsAllocated)
        {
            if (RuntimeManager.CoreSystem.getMasterChannelGroup(out ChannelGroup masterCG) == RESULT.OK)
            {
                if (_captureDSP.hasHandle())
                {
                    masterCG.removeDSP(_captureDSP);
                    _captureDSP.release();
                }
            }
            _objHandle.Free();
        }
        _lastPlayPos = 0;
        _samplesPlayed = 0;
        _lastRecordPos = 0;
        _samplesRecorded = 0;
        _channel.setPaused(true);
        _isRecording = false;
        _sound.release();
        RuntimeManager.CoreSystem.recordStop(RecordDeviceId);
    }
}
[AOT.MonoPInvokeCallback(typeof(DSP_READ_CALLBACK))]
private static RESULT CaptureDSPReadCallback(ref DSP_STATE dsp_state, IntPtr inbuffer, IntPtr outbuffer, uint length, int inchannels, ref int outchannels)
{
    DSP_STATE_FUNCTIONS functions = (DSP_STATE_FUNCTIONS)Marshal.PtrToStructure(dsp_state.functions, typeof(DSP_STATE_FUNCTIONS));
    functions.getuserdata(ref dsp_state, out IntPtr userData);
    GCHandle objHandle = GCHandle.FromIntPtr(userData);
    FMODVoiceProcessor obj = objHandle.Target as FMODVoiceProcessor;
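    // The in/out buffers are interleaved: 'length' is the number of sample frames,
    // and each frame holds 'inchannels' consecutive float samples (ch0, ch1, ..., chN-1).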
    // Copy the incoming interleaved buffer so it can be processed after the mix
    int lengthElements = (int)length * inchannels;
    Marshal.Copy(inbuffer, obj._8channelsSampleBuffer, 0, lengthElements);
    // Copy the inbuffer to the outbuffer so we can still hear it
    Marshal.Copy(obj._8channelsSampleBuffer, 0, outbuffer, lengthElements);
    // De-interleave: take channel 0 of every frame to get a mono signal
    for (int i = 0; i < length; i++)
    {
        obj._sampleFloatBuffer[i] = obj._8channelsSampleBuffer[i * inchannels];
    }
    // Convert float samples in [-1, 1] to 16-bit PCM, clamping to avoid overflow
    for (int i = 0; i < length; i++)
    {
        float clamped = Math.Max(-1f, Math.Min(1f, obj._sampleFloatBuffer[i]));
        obj._sampleInt16Buffer[i] = (short)(clamped * short.MaxValue);
    }
    obj.FrameCaptured?.Invoke(obj._sampleInt16Buffer);
    return RESULT.OK;
}
private void OnEnable()
{
    EnableRecording();
}
private void OnDisable()
{
    DisableRecording();
}