#if (CINEMACHINE_HDRP || CINEMACHINE_URP)
using UnityEngine;
using UnityEngine.Serialization;
using System.Collections.Generic;
using UnityEngine.Rendering;
#if CINEMACHINE_HDRP
using UnityEngine.Rendering.HighDefinition;
#elif CINEMACHINE_URP
using UnityEngine.Rendering.Universal;
#endif
namespace Unity.Cinemachine
{
/// <summary>
/// This behaviour is a liaison between Cinemachine and the Post-Processing v3 module
/// shipped with HDRP and URP.
///
/// As a component on the Virtual Camera, it holds
/// a Post-Processing Profile asset that will be applied to the Unity camera whenever
/// the Virtual Camera is live. It also has the optional functionality of animating
/// the Focus Distance and DepthOfField properties of the Camera State, and
/// applying them to the current Post-Processing profile, provided that profile has a
/// DepthOfField effect that is enabled.
/// </summary>
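/// <example>
/// Illustrative sketch only: <c>vcamGameObject</c> and <c>myVolumeProfile</c> are assumed
/// to be an existing virtual-camera GameObject and a VolumeProfile asset in your project;
/// the component, enum, and field names come from this class.
/// <code>
/// var volumeSettings = vcamGameObject.AddComponent&lt;CinemachineVolumeSettings&gt;();
/// volumeSettings.Profile = myVolumeProfile; // profile with an enabled DepthOfField override
/// volumeSettings.FocusTracking = CinemachineVolumeSettings.FocusTrackingMode.LookAtTarget;
/// volumeSettings.FocusOffset = 0.25f; // push the sharpest point slightly past the target
/// </code>
/// </example>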
[ExecuteAlways]
[AddComponentMenu("Cinemachine/Procedural/Extensions/Cinemachine Volume Settings")]
[SaveDuringPlay]
[DisallowMultipleComponent]
[HelpURL(Documentation.BaseURL + "manual/CinemachineVolumeSettings.html")]
public class CinemachineVolumeSettings : CinemachineExtension
{
/// <summary>
/// This is the priority for the vcam's PostProcessing volumes. It's set to a high
/// number in order to ensure that it overrides other volumes for the active vcam.
/// You can change this value if necessary to work with other systems.
/// </summary>
public static float s_VolumePriority = 1000f;
/// <summary>
/// This is the weight that the PostProcessing profile will have when the camera is fully active.
/// It will blend to and from 0 along with the camera.
/// </summary>
public float Weight = 1;
/// <summary>The reference object for focus tracking</summary>
public enum FocusTrackingMode
{
/// <summary>No focus tracking</summary>
None,
/// <summary>Focus offset is relative to the LookAt target</summary>
LookAtTarget,
/// <summary>Focus offset is relative to the Follow target</summary>
FollowTarget,
/// <summary>Focus offset is relative to the Custom target set here</summary>
CustomTarget,
/// <summary>Focus offset is relative to the camera</summary>
Camera
};
/// <summary>If the profile has the appropriate overrides, will set the base focus
/// distance to be the distance from the selected target to the camera.
/// The Focus Offset field will then modify that distance.</summary>
[Tooltip("If the profile has the appropriate overrides, will set the base focus "
+ "distance to be the distance from the selected target to the camera. "
+ "The Focus Offset field will then modify that distance.")]
[FormerlySerializedAs("m_FocusTracking")]
public FocusTrackingMode FocusTracking;
/// <summary>The target to use if Focus Tracks Target is set to Custom Target</summary>
[Tooltip("The target to use if Focus Tracks Target is set to Custom Target")]
[FormerlySerializedAs("m_FocusTarget")]
public Transform FocusTarget;
/// <summary>Offset from target distance, to be used with Focus Tracks Target.
/// Offsets the sharpest point away from the focus target</summary>
[Tooltip("Offset from target distance, to be used with Focus Tracks Target. "
+ "Offsets the sharpest point away from the focus target.")]
[FormerlySerializedAs("m_FocusOffset")]
public float FocusOffset;
/// <summary>
/// If Focus tracking is enabled, this will return the calculated focus distance
/// </summary>
public float CalculatedFocusDistance { get; private set; }
/// <summary>
/// This profile will be applied whenever this virtual camera is live
/// </summary>
[Tooltip("This profile will be applied whenever this virtual camera is live")]
[FormerlySerializedAs("m_Profile")]
public VolumeProfile Profile;
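// Per-vcam extra state: holds a private runtime copy of the profile so the focus
// distance can be animated without modifying the shared VolumeProfile asset.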
class VcamExtraState : VcamExtraStateBase
{
public VolumeProfile ProfileCopy;
public void CreateProfileCopy(VolumeProfile source)
{
DestroyProfileCopy();
VolumeProfile profile = ScriptableObject.CreateInstance<VolumeProfile>();
for (int i = 0; source != null && i < source.components.Count; ++i)
{
var itemCopy = Instantiate(source.components[i]);
profile.components.Add(itemCopy);
profile.isDirty = true;
}
ProfileCopy = profile;
}
public void DestroyProfileCopy()
{
if (ProfileCopy != null)
RuntimeUtility.DestroyObject(ProfileCopy);
ProfileCopy = null;
}
}
List<VcamExtraState> m_extraStateCache;
/// <summary>True if the profile is enabled and nontrivial</summary>
public bool IsValid => Profile != null && Profile.components.Count > 0;
/// <summary>Called by the editor when the shared asset has been edited</summary>
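/// <remarks>
/// Illustrative sketch only (<c>volumeSettings</c> is an assumed reference to this
/// component): if the shared Profile's components are edited, calling this forces the
/// per-vcam profile copies to be rebuilt on the next update.
/// <code>
/// if (volumeSettings.Profile.TryGet&lt;DepthOfField&gt;(out var dof))
///     dof.focusDistance.value = 5f; // hypothetical edit to the shared asset
/// volumeSettings.InvalidateCachedProfile();
/// </code>
/// </remarks>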
public void InvalidateCachedProfile()
{
m_extraStateCache ??= new();
GetAllExtraStates(m_extraStateCache);
for (int i = 0; i < m_extraStateCache.Count; ++i)
m_extraStateCache[i].DestroyProfileCopy();
}
void OnValidate()
{
Weight = Mathf.Max(0, Weight);
}
void Reset()
{
Weight = 1;
FocusTracking = FocusTrackingMode.None;
FocusTarget = null;
FocusOffset = 0;
Profile = null;
}
protected override void OnEnable()
{
InvalidateCachedProfile();
}
protected override void OnDestroy()
{
InvalidateCachedProfile();
base.OnDestroy();
}
/// <summary>Apply PostProcessing effects</summary>
/// <param name="vcam">The virtual camera being processed</param>
/// <param name="stage">The current pipeline stage</param>
/// <param name="state">The current virtual camera state</param>
/// <param name="deltaTime">The current applicable deltaTime</param>
protected override void PostPipelineStageCallback(
CinemachineVirtualCameraBase vcam,
CinemachineCore.Stage stage, ref CameraState state, float deltaTime)
{
// Set the focus after the camera has been fully positioned.
if (stage == CinemachineCore.Stage.Finalize)
{
var extra = GetExtraState(vcam);
if (!IsValid)
extra.DestroyProfileCopy();
else
{
var profile = Profile;
// Handle Follow Focus
if (FocusTracking == FocusTrackingMode.None)
extra.DestroyProfileCopy();
else
{
if (extra.ProfileCopy == null)
extra.CreateProfileCopy(Profile);
profile = extra.ProfileCopy;
if (profile.TryGet(out DepthOfField dof))
{
float focusDistance = FocusOffset;
if (FocusTracking == FocusTrackingMode.LookAtTarget)
focusDistance += (state.GetFinalPosition() - state.ReferenceLookAt).magnitude;
else
{
Transform focusTarget = null;
switch (FocusTracking)
{
default: break;
case FocusTrackingMode.FollowTarget: focusTarget = vcam.Follow; break;
case FocusTrackingMode.CustomTarget: focusTarget = FocusTarget; break;
}
if (focusTarget != null)
focusDistance += (state.GetFinalPosition() - focusTarget.position).magnitude;
}
CalculatedFocusDistance = focusDistance = Mathf.Max(0, focusDistance);
dof.focusDistance.value = focusDistance;
state.Lens.PhysicalProperties.FocusDistance = focusDistance;
profile.isDirty = true;
}
}
// Apply the post-processing
state.AddCustomBlendable(new CameraState.CustomBlendableItems.Item { Custom = profile, Weight = Weight });
}
}
}
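// Called on camera activation; when the activation is a cut, resets the render
// pipeline's temporal-effect history on the brain's output camera.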
static void OnCameraCut(ICinemachineCamera.ActivationEventParams evt)
{
if (!evt.IsCut)
return;
var brain = evt.Origin as CinemachineBrain;
var cam = brain == null ? null : brain.OutputCamera;
#if CINEMACHINE_HDRP
// Reset temporal effects
if (cam != null)
{
HDCamera hdCam = HDCamera.GetOrCreate(cam);
hdCam.volumetricHistoryIsValid = false;
hdCam.colorPyramidHistoryIsValid = false;
hdCam.Reset();
}
#elif CINEMACHINE_URP
// Reset temporal effects
if (cam != null && cam.TryGetComponent<UniversalAdditionalCameraData>(out var data))
data.resetHistory = true;
#endif
}
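// Called after the brain pushes its state to the Unity camera. Collects the
// VolumeProfile custom blendables from the blended camera state and applies them,
// with their blend weights, to dynamically managed high-priority global Volumes.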
static void ApplyPostFX(CinemachineBrain brain)
{
CameraState state = brain.State;
int numBlendables = state.GetNumCustomBlendables();
var volumes = GetDynamicBrainVolumes(brain, numBlendables);
for (int i = 0; i < volumes.Count; ++i)
{
volumes[i].weight = 0;
volumes[i].sharedProfile = null;
volumes[i].profile = null;
}
Volume firstVolume = null;
int numPPblendables = 0;
for (int i = 0; i < numBlendables; ++i)
{
var b = state.GetCustomBlendable(i);
var profile = b.Custom as VolumeProfile;
if (profile != null) // in case it was deleted
{
var v = volumes[i];
if (firstVolume == null)
firstVolume = v;
v.sharedProfile = profile;
v.isGlobal = true;
v.priority = s_VolumePriority - (numBlendables - i) - 1;
v.weight = b.Weight;
++numPPblendables;
}
#if false // set this to true to force first weight to 1
// If more than one volume, then set the first one's weight to 1
if (numPPblendables > 1)
firstVolume.weight = 1;
#endif
}
// if (firstVolume != null)
// Debug.Log($"Applied post FX for {numPPblendables} PP blendables in {brain.ActiveVirtualCamera.Name}");
}
const string sVolumeOwnerName = "__CMVolumes";
static List<Volume> sVolumes = new();
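// Finds (and, when minVolumes > 0, creates as needed) the hidden child object under
// the brain that holds the dynamically managed Volume components, ensuring it is on a
// layer included in the output camera's volume layer mask.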
static List<Volume> GetDynamicBrainVolumes(CinemachineBrain brain, int minVolumes)
{
// Locate the camera's child object that holds our dynamic volumes
GameObject volumeOwner = null;
Transform t = brain.transform;
int numChildren = t.childCount;
sVolumes.Clear();
for (int i = 0; volumeOwner == null && i < numChildren; ++i)
{
GameObject child = t.GetChild(i).gameObject;
if (child.hideFlags == HideFlags.HideAndDontSave)
{
child.GetComponents(sVolumes);
if (sVolumes.Count > 0)
volumeOwner = child;
}
}
if (minVolumes > 0)
{
if (volumeOwner == null)
{
volumeOwner = new GameObject(sVolumeOwnerName);
volumeOwner.hideFlags = HideFlags.HideAndDontSave;
volumeOwner.transform.parent = t;
}
// Update the volume's layer so it will be seen
#if CINEMACHINE_HDRP
brain.gameObject.TryGetComponent<HDAdditionalCameraData>(out var data);
#elif CINEMACHINE_URP
brain.gameObject.TryGetComponent<UniversalAdditionalCameraData>(out var data);
#endif
if (data != null)
{
int mask = data.volumeLayerMask;
for (int i = 0; i < 32; ++i)
{
if ((mask & (1 << i)) != 0)
{
volumeOwner.layer = i;
break;
}
}
}
while (sVolumes.Count < minVolumes)
sVolumes.Add(volumeOwner.gameObject.AddComponent<Volume>());
}
return sVolumes;
}
#if UNITY_EDITOR
[UnityEditor.InitializeOnLoad]
class EditorInitialize { static EditorInitialize() { InitializeModule(); } }
#endif
[RuntimeInitializeOnLoadMethod]
static void InitializeModule()
{
// After the brain pushes the state to the camera, hook in to the PostFX
CinemachineCore.CameraUpdatedEvent.RemoveListener(ApplyPostFX);
CinemachineCore.CameraUpdatedEvent.AddListener(ApplyPostFX);
CinemachineCore.CameraActivatedEvent.RemoveListener(OnCameraCut);
CinemachineCore.CameraActivatedEvent.AddListener(OnCameraCut);
}
}
}
#endif