Here is the script I use in my project. I ran into the same issue and had to build a custom solution to record with Unity Recorder while also capturing FMOD audio.
Attach CameraRecorder to the camera you want to record from. Note that it must NOT be your gameplay camera; create a new one and place it where you want.
The system requires Unity Recorder to be installed. It also requires FFmpeg, and you will need to set the path to it in the inspector; FFmpeg is used to combine the recorded results.
The component records video and FMOD audio separately, then combines them into a single file. You can choose whether to delete or keep the original files in case you want to compose them manually.
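For reference, the merge step is equivalent to running FFmpeg by hand like this (file names are illustrative):
ffmpeg -i RecordedVideo_01.15.12.30.00.mp4 -i RecordedVideo_01.15.12.30.00.wav -c:v copy -c:a aac RecordedVideo_01.15.12.30.00_Combined.mp4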
Files are stored in Recordings at the project root (next to the Assets folder).
Note: since Unity Recorder is an Editor-only tool, this approach won't work in a build. You will also need to re-enable Unity Audio for Unity Recorder to work properly (FMOD typically disables it via "Disable Unity Audio" in Edit > Project Settings > Audio).
After recording is done, the script waits the number of seconds set in "waitBeforeTryToMerge" before performing the merge. This is because Unity Recorder gives no callback once the file has been saved to disk.
CameraRecorder class
using System;
using UnityEngine;
using UnityEngine.InputSystem;
using System.Collections;
using System.Diagnostics;
using System.IO;
using UnityEditor.Recorder;
using UnityEditor.Recorder.Encoder;
using UnityEditor.Recorder.Input;
using Debug = UnityEngine.Debug;
namespace Recorder
{
[RequireComponent(typeof(Camera))] // Ensure a Camera component is attached
public class CameraRecorder : MonoBehaviour
{
[SerializeField] private int width = 1920;
[SerializeField] private int height = 1080;
[SerializeField] private float frameRate = 60; // Set target FPS to 60
[SerializeField] private string outputFileName = "RecordedVideo";
[SerializeField] private InputActionReference recordingAction;
[SerializeField] private bool recordAudio = true;
[SerializeField] private bool deleteSourceFiles = false;
[SerializeField] private float waitBeforeTryToMerge = 2.0f;
[SerializeField] private string pathToFFMPEG = "/opt/homebrew/bin/ffmpeg";
private Camera cameraComponent;
private RenderTexture temporaryRenderTexture;
private RecorderController recorderController;
private AudioRecorder audioRecorder;
private bool isRecording = false;
private string outputDirectory;
private string videoFilePath;
private string audioFilePath;
private void Start()
{
cameraComponent = GetComponent<Camera>();
if (recordAudio)
{
audioRecorder = new AudioRecorder();
}
recordingAction.action.Enable();
recordingAction.action.performed += OnRecordingActionPerformed;
}
private void OnDestroy()
{
// Unsubscribe so the input action cannot keep calling into a destroyed component
recordingAction.action.performed -= OnRecordingActionPerformed;
}
private void OnRecordingActionPerformed(InputAction.CallbackContext context)
{
if (isRecording)
{
StopRecording();
}
else
{
StartRecording();
}
}
private void SetupRecorder()
{
var fullName = Directory.GetParent(Application.dataPath)?.FullName;
if (fullName != null)
outputDirectory = Path.Combine(fullName, "Recordings");
Directory.CreateDirectory(outputDirectory);
var dateTimeSuffix = DateTime.Now.ToString("MM.dd.HH.mm.ss");
var outputFileCombined = $"{outputFileName}_{dateTimeSuffix}";
videoFilePath = Path.Combine(outputDirectory, outputFileCombined);
audioFilePath = Path.ChangeExtension(videoFilePath, ".wav");
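// videoFilePath intentionally has no extension yet: Unity Recorder appends ".mp4" itself.
// The WAV file is written by this script, so it gets its extension right away.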
// Initialize the temporary RenderTexture
temporaryRenderTexture = new RenderTexture(width, height, 24);
temporaryRenderTexture.format = RenderTextureFormat.ARGB32;
cameraComponent.targetTexture = temporaryRenderTexture;
var renderTextureInput = new RenderTextureInputSettings
{
RenderTexture = temporaryRenderTexture
};
var videoRecorderSettings = ScriptableObject.CreateInstance<MovieRecorderSettings>();
videoRecorderSettings.name = "CustomVideoRecorder";
videoRecorderSettings.Enabled = true;
videoRecorderSettings.EncoderSettings = new CoreEncoderSettings();
videoRecorderSettings.ImageInputSettings = renderTextureInput;
videoRecorderSettings.OutputFile = videoFilePath;
videoRecorderSettings.FrameRatePlayback = FrameRatePlayback.Constant;
videoRecorderSettings.FrameRate = frameRate;
videoRecorderSettings.CapFrameRate = true;
var recorderControllerSettings = ScriptableObject.CreateInstance<RecorderControllerSettings>();
recorderControllerSettings.AddRecorderSettings(videoRecorderSettings);
recorderControllerSettings.SetRecordModeToManual();
recorderControllerSettings.FrameRate = frameRate;
recorderControllerSettings.CapFrameRate = true;
recorderController = new RecorderController(recorderControllerSettings);
}
private void StartRecording()
{
SetupRecorder();
isRecording = true;
// Ensure the temporary RenderTexture is available
if (temporaryRenderTexture == null)
{
temporaryRenderTexture = new RenderTexture(width, height, 24);
cameraComponent.targetTexture = temporaryRenderTexture;
}
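// Unity Recorder requires PrepareRecording to be called before StartRecording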
recorderController.PrepareRecording();
recorderController.StartRecording();
if (recordAudio)
{
audioRecorder.StartRecording();
}
}
private void StopRecording()
{
isRecording = false;
if (recordAudio)
{
audioRecorder.StopRecording();
}
recorderController.StopRecording();
StartCoroutine(SaveAudio());
// Clean up the temporary RenderTexture
if (temporaryRenderTexture != null)
{
cameraComponent.targetTexture = null;
Destroy(temporaryRenderTexture);
temporaryRenderTexture = null;
}
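// Without audio there is nothing to merge, so stop here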
if (!recordAudio) return;
StartCoroutine(WaitForRecordingToFinish());
}
private IEnumerator WaitForRecordingToFinish()
{
yield return new WaitForSeconds(waitBeforeTryToMerge);
//Unity Recorder appends the file extension itself, so the path is stored without one.
//Add it back here so the finished file can be found on disk.
videoFilePath += ".mp4";
if (File.Exists(videoFilePath))
{
Debug.Log("Recording finished. Starting to combine video and audio.");
CombineVideoAndAudio();
}
else
{
Debug.LogError("Recorded video file was not found after it was recorded.");
}
}
private IEnumerator SaveAudio()
{
if (recordAudio)
{
audioRecorder.SaveAudioToWav(audioFilePath);
yield return null;
Debug.Log("Audio saved to " + audioFilePath);
}
Debug.Log("Recording completed.");
}
private void CombineVideoAndAudio()
{
var combinedFilePath = Path.Combine(Path.GetDirectoryName(videoFilePath),
Path.GetFileNameWithoutExtension(videoFilePath) + "_Combined" + Path.GetExtension(videoFilePath)
);
// Build the FFmpeg arguments: copy the video stream as-is and encode the audio to AAC
var ffmpegCommand = $"-i \"{videoFilePath}\" -i \"{audioFilePath}\" -c:v copy -c:a aac \"{combinedFilePath}\"";
var process = new Process
{
StartInfo = new ProcessStartInfo
{
FileName = pathToFFMPEG, // Explicit path to the FFmpeg binary, set in the inspector
Arguments = ffmpegCommand,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
}
};
try
{
process.Start();
// Drain stderr first: FFmpeg writes its log there, and reading stdout to the end
// first can deadlock once the stderr pipe buffer fills up
string error = process.StandardError.ReadToEnd();
string output = process.StandardOutput.ReadToEnd();
process.WaitForExit();
if (process.ExitCode == 0)
{
Debug.Log("Combined video and audio saved to " + combinedFilePath);
if (deleteSourceFiles)
{
DeleteSourceFiles();
}
}
else
{
Debug.LogError("FFmpeg failed with exit code " + process.ExitCode);
Debug.LogError("FFmpeg Output: " + output);
Debug.LogError("FFmpeg Errors: " + error);
}
}
catch (Exception ex)
{
Debug.LogError("An error occurred while combining video and audio: " + ex.Message);
}
}
private void DeleteSourceFiles()
{
try
{
if (File.Exists(videoFilePath))
{
File.Delete(videoFilePath);
Debug.Log("Deleted video file: " + videoFilePath);
}
if (File.Exists(audioFilePath))
{
File.Delete(audioFilePath);
Debug.Log("Deleted audio file: " + audioFilePath);
}
}
catch (Exception ex)
{
Debug.LogError("An error occurred while deleting source files: " + ex.Message);
}
}
}
}
AudioRecorder class
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using FMODUnity;
namespace Recorder
{
public class AudioRecorder
{
private FMOD.DSP mDSP;
private FMOD.ChannelGroup mCg;
private readonly List<float> mAudioData;
private readonly int mSampleRate;
private int mNumChannels;
private FMOD.DSP_DESCRIPTION mDSPDescription;
public AudioRecorder()
{
mAudioData = new List<float>();
RuntimeManager.CoreSystem.getSoftwareFormat(out mSampleRate, out _, out _);
// Keep a handle so the static DSP callback can find this instance; a normal
// handle is all ToIntPtr/FromIntPtr need, and it stays alive with the recorder
var mObjHandle = GCHandle.Alloc(this);
mDSPDescription = new FMOD.DSP_DESCRIPTION
{
numinputbuffers = 1,
numoutputbuffers = 1,
read = CaptureDSPReadCallback,
userdata = GCHandle.ToIntPtr(mObjHandle)
};
}
public void StartRecording()
{
mAudioData.Clear();
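// Tap FMOD's master bus so the capture DSP hears everything that is audible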
var bus = RuntimeManager.GetBus("bus:/");
if (bus.getChannelGroup(out mCg) != FMOD.RESULT.OK) return;
RuntimeManager.CoreSystem.createDSP(ref mDSPDescription, out mDSP);
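// Insert the capture DSP at index 0, the head of the group's DSP chain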
mCg.addDSP(0, mDSP);
}
public void StopRecording()
{
if (!mDSP.hasHandle()) return;
mCg.removeDSP(mDSP);
mDSP.release();
}
public void SaveAudioToWav(string filePath)
{
using var fs = File.Create(filePath);
using var bw = new BinaryWriter(fs);
WriteWavHeader(bw, mAudioData.Count);
var bytes = new byte[mAudioData.Count * 4];
Buffer.BlockCopy(mAudioData.ToArray(), 0, bytes, 0, bytes.Length);
fs.Write(bytes, 0, bytes.Length);
}
[AOT.MonoPInvokeCallback(typeof(FMOD.DSP_READ_CALLBACK))]
private static FMOD.RESULT CaptureDSPReadCallback(ref FMOD.DSP_STATE dspState, IntPtr inBuffer, IntPtr outBuffer, uint length, int inChannels, ref int outChannels)
{
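// Runs on FMOD's mixer thread: copy the incoming samples, append them to the
// recorder's buffer, and pass the audio through unchanged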
var lengthElements = (int)length * inChannels;
var data = new float[lengthElements];
Marshal.Copy(inBuffer, data, 0, lengthElements);
var functions = (FMOD.DSP_STATE_FUNCTIONS)Marshal.PtrToStructure(dspState.functions, typeof(FMOD.DSP_STATE_FUNCTIONS));
functions.getuserdata(ref dspState, out var userData);
if (userData != IntPtr.Zero)
{
var objHandle = GCHandle.FromIntPtr(userData);
if (objHandle.Target is AudioRecorder obj)
{
obj.mNumChannels = inChannels;
obj.mAudioData.AddRange(data);
}
}
Marshal.Copy(data, 0, outBuffer, lengthElements);
return FMOD.RESULT.OK;
}
private void WriteWavHeader(BinaryWriter bw, int length)
{
bw.Seek(0, SeekOrigin.Begin);
bw.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
bw.Write(36 + length * 4); // RIFF chunk size: 36 header bytes after this field plus the data bytes
bw.Write(System.Text.Encoding.ASCII.GetBytes("WAVEfmt "));
bw.Write(16); // fmt chunk size
bw.Write((short)3); // format 3 = IEEE float
bw.Write((short)mNumChannels);
bw.Write(mSampleRate);
bw.Write(mSampleRate * 32 / 8 * mNumChannels); // byte rate
bw.Write((short)(32 / 8 * mNumChannels)); // block align
bw.Write((short)32); // bits per sample
bw.Write(System.Text.Encoding.ASCII.GetBytes("data"));
bw.Write(length * 4); // data chunk size in bytes (4 bytes per float sample)
}
}
}
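The AudioRecorder can also be used on its own if you only need the FMOD output as a WAV file. Below is a minimal sketch of that, assuming FMOD is initialized; the component name, key binding, and output path are illustrative, not part of the setup above.
using System.IO;
using UnityEngine;
using UnityEngine.InputSystem;
namespace Recorder
{
public class AudioOnlyRecorderExample : MonoBehaviour
{
private AudioRecorder audioRecorder;
private bool isRecording;
private void Start()
{
audioRecorder = new AudioRecorder();
}
private void Update()
{
// Toggle with the R key (illustrative; the full component uses an InputActionReference)
if (Keyboard.current == null || !Keyboard.current.rKey.wasPressedThisFrame) return;
if (isRecording)
{
audioRecorder.StopRecording();
var outputDirectory = Path.Combine(Directory.GetParent(Application.dataPath)?.FullName ?? ".", "Recordings");
Directory.CreateDirectory(outputDirectory);
audioRecorder.SaveAudioToWav(Path.Combine(outputDirectory, "AudioOnly.wav"));
}
else
{
audioRecorder.StartRecording();
}
isRecording = !isRecording;
}
}
}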