// SimpleURPToonLitOutlineExample_Shared.hlsl (forked from ColinLeung-NiloCat/UnityURPToonLitShaderExample)
// For more information, visit -> https://github.com/ColinLeung-NiloCat/UnityURPToonLitShaderExample
// #pragma once is a safeguard best practice in almost every .hlsl (needs Unity 2020 or newer);
// it ensures users of this .hlsl can include it anywhere, any number of times, without multi-include conflicts
#pragma once
// We don't have "UnityCG.cginc" in SRP/URP's package anymore, so:
// Including the following two hlsl files is enough for shading with Universal Pipeline. Everything is included in them.
// Core.hlsl will include SRP shader library, all constant buffers not related to materials (perobject, percamera, perframe).
// It also includes matrix/space conversion functions and fog.
// Lighting.hlsl will include the light functions/data to abstract light constants. You should use GetMainLight and GetLight functions
// that initialize Light struct. Lighting.hlsl also include GI, Light BDRF functions. It also includes Shadows.
// Required by all Universal Render Pipeline shaders.
// It will include Unity built-in shader variables (except the lighting variables)
// (https://docs.unity3d.com/Manual/SL-UnityShaderVariables.html
// It will also include many utilitary functions.
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
// Include this if you are doing a lit shader. This includes lighting shader variables,
// lighting and shadow functions
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"
// Material shader variables are not defined in the SRP or URP shader library.
// This means _BaseColor, _BaseMap, _BaseMap_ST, and all the variables in a shader's Properties section
// must be defined by the shader itself. If you define all those properties in a CBUFFER named
// UnityPerMaterial, SRP can cache the material properties between frames and significantly reduce
// the cost of each draw call.
// URP's LitInput.hlsl does contain a UnityPerMaterial CBUFFER for the material properties mentioned above,
// but it is not part of the ShaderLibrary; it is specific to the URP Lit shader.
// So we are not going to use LitInput.hlsl; we will implement everything ourselves.
//#include "Packages/com.unity.render-pipelines.universal/Shaders/LitInput.hlsl"
// we will include some utility .hlsl files to help us
#include "NiloOutlineUtil.hlsl"
#include "NiloZOffset.hlsl"
#include "NiloInvLerpRemap.hlsl"
// note:
// suffix OS means object space (e.g. positionOS = position in object space)
// suffix WS means world space (e.g. positionWS = position in world space)
// suffix VS means view space (e.g. positionVS = position in view space)
// suffix CS means clip space (e.g. positionCS = position in clip space)
// all passes will share this Attributes struct (defines the data sent from the Unity application to our vertex shader)
struct Attributes
{
float3 positionOS : POSITION;
half3 normalOS : NORMAL;
half4 tangentOS : TANGENT;
float2 uv : TEXCOORD0;
// to support GPU instancing and Single Pass Stereo rendering (VR), add the following section
UNITY_VERTEX_INPUT_INSTANCE_ID // For non PSSL, equals to -> uint instanceID : SV_InstanceID;
};
// all passes will share this Varyings struct (defines the data passed from our vertex shader to our fragment shader)
struct Varyings
{
float2 uv : TEXCOORD0;
float4 positionWSAndFogFactor : TEXCOORD1; // xyz: positionWS, w: vertex fog factor
half3 normalWS : TEXCOORD2;
float4 positionCS : SV_POSITION;
// to support GPU instancing and Single Pass Stereo rendering (VR), add the following section
UNITY_VERTEX_INPUT_INSTANCE_ID // For non PSSL, equals to -> uint instanceID : SV_InstanceID;
UNITY_VERTEX_OUTPUT_STEREO // For non OpenGL and non PSSL, equals to -> uint stereoTargetEyeIndexAsRTArrayIdx : SV_RenderTargetArrayIndex; (when UNITY_STEREO_INSTANCING_ENABLED)
};
///////////////////////////////////////////////////////////////////////////////////////
// CBUFFER and Uniforms
// (you should put all uniforms of all passes inside this single UnityPerMaterial CBUFFER! otherwise SRP batching is not possible!)
///////////////////////////////////////////////////////////////////////////////////////
// sampler2D declarations do not need to go inside the CBUFFER
sampler2D _BaseMap;
sampler2D _EmissionMap;
sampler2D _OcclusionMap;
sampler2D _OutlineZOffsetMaskTex;
// put all your uniforms (usually the things declared in the .shader file's Properties{} block) inside this CBUFFER to keep the shader SRP Batcher compatible
// see -> https://blogs.unity3d.com/2019/02/28/srp-batcher-speed-up-your-rendering/
CBUFFER_START(UnityPerMaterial)
// high level settings
float _IsFace;
// base color
float4 _BaseMap_ST;
half4 _BaseColor;
// alpha
half _Cutoff;
// emission
float _UseEmission;
half3 _EmissionColor;
half _EmissionMulByBaseColor;
half3 _EmissionMapChannelMask;
// occlusion
float _UseOcclusion;
half _OcclusionStrength;
half4 _OcclusionMapChannelMask;
half _OcclusionRemapStart;
half _OcclusionRemapEnd;
// lighting
half3 _IndirectLightMinColor;
half _CelShadeMidPoint;
half _CelShadeSoftness;
// shadow mapping
half _ReceiveShadowMappingAmount;
float _ReceiveShadowMappingPosOffset;
half3 _ShadowMapColor;
// outline
float _OutlineWidth;
half3 _OutlineColor;
float _OutlineZOffset;
float _OutlineZOffsetMaskRemapStart;
float _OutlineZOffsetMaskRemapEnd;
CBUFFER_END
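// for reference, each uniform above is expected to have a matching entry in the .shader file's Properties{} block,
// e.g. (illustrative names/defaults only):
//   _BaseMap("BaseMap", 2D) = "white" {}
//   _BaseColor("BaseColor", Color) = (1,1,1,1)
//   _OutlineWidth("OutlineWidth", Range(0,4)) = 1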
// a special uniform used only by the shadow bias fix in the ShadowCaster pass (the ToonShaderApplyShadowBiasFix section below); it is not a per-material uniform,
// so it is fine to declare it outside our UnityPerMaterial CBUFFER
float3 _LightDirection;
struct ToonSurfaceData
{
half3 albedo;
half alpha;
half3 emission;
half occlusion;
};
struct ToonLightingData
{
half3 normalWS;
float3 positionWS;
half3 viewDirectionWS;
float4 shadowCoord;
};
///////////////////////////////////////////////////////////////////////////////////////
// vertex shared functions
///////////////////////////////////////////////////////////////////////////////////////
float3 TransformPositionWSToOutlinePositionWS(float3 positionWS, float positionVS_Z, float3 normalWS)
{
// you can replace this with your own method! Here we use a simple world-space method for tutorial purposes; it is not the best method!
float outlineExpandAmount = _OutlineWidth * GetOutlineCameraFovAndDistanceFixMultiplier(positionVS_Z);
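// (GetOutlineCameraFovAndDistanceFixMultiplier() scales the expansion by view-space depth and camera FOV,
// so the outline keeps a roughly constant screen-space width regardless of camera distance)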
#if defined(UNITY_STEREO_INSTANCING_ENABLED) || defined(UNITY_STEREO_MULTIVIEW_ENABLED) || defined(UNITY_STEREO_DOUBLE_WIDE_ENABLED)
outlineExpandAmount *= 0.5;
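// (assumption: halved here because outlines tend to read as too thick under stereo/VR rendering)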
#endif
return positionWS + normalWS * outlineExpandAmount;
}
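// a hypothetical alternative (purely illustrative, not used by this shader) is to expand in clip space instead,
// replacing both this function and the TransformWorldToHClip() call in the vertex shader, e.g. roughly:
//   float4 positionCS = TransformWorldToHClip(positionWS);
//   float3 normalCS = TransformWorldToHClipDir(normalWS, true);      // helper from the core SRP SpaceTransforms.hlsl
//   positionCS.xy += normalize(normalCS.xy) * positionCS.w * _OutlineWidth * 0.001; // 0.001 and any aspect-ratio handling are illustrative only
//   return positionCS;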
// if "ToonShaderIsOutline" is not defined = do regular MVP transform
// if "ToonShaderIsOutline" is defined = do regular MVP transform + push vertex out a bit according to normal direction
Varyings VertexShaderWork(Attributes input)
{
Varyings output;
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// do this setup as early as possible in the vertex shader (right after any vertex invalid/discard logic).
// to support GPU instancing and Single Pass Stereo rendering (VR), add the following section
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
UNITY_SETUP_INSTANCE_ID(input); // will turn into this in non OpenGL and non PSSL -> UnitySetupInstanceID(input.instanceID);
UNITY_TRANSFER_INSTANCE_ID(input, output); // will turn into this in non OpenGL and non PSSL -> output.instanceID = input.instanceID;
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(output); // will turn into this in non OpenGL and non PSSL -> output.stereoTargetEyeIndexAsRTArrayIdx = unity_StereoEyeIndex;
// VertexPositionInputs contains position in multiple spaces (world, view, homogeneous clip space, ndc)
// Unity compiler will strip all unused references (say you don't use view space).
// Therefore there is more flexibility at no additional cost with this struct.
VertexPositionInputs vertexInput = GetVertexPositionInputs(input.positionOS);
// Similar to VertexPositionInputs, VertexNormalInputs will contain normal, tangent and bitangent
// in world space. If not used it will be stripped.
VertexNormalInputs vertexNormalInput = GetVertexNormalInputs(input.normalOS, input.tangentOS);
float3 positionWS = vertexInput.positionWS;
#ifdef ToonShaderIsOutline
positionWS = TransformPositionWSToOutlinePositionWS(vertexInput.positionWS, vertexInput.positionVS.z, vertexNormalInput.normalWS);
#endif
// Computes fog factor per-vertex.
float fogFactor = ComputeFogFactor(vertexInput.positionCS.z);
// TRANSFORM_TEX is the same as in the old (built-in render pipeline) shader library.
output.uv = TRANSFORM_TEX(input.uv,_BaseMap);
// packing positionWS(xyz) & fog(w) into a vector4
output.positionWSAndFogFactor = float4(positionWS, fogFactor);
output.normalWS = vertexNormalInput.normalWS; // already normalized by GetVertexNormalInputs(...)
output.positionCS = TransformWorldToHClip(positionWS);
#ifdef ToonShaderIsOutline
// [Read the ZOffset mask texture]
// we can't use tex2D() in a vertex shader because ddx & ddy are unknown before rasterization,
// so use tex2Dlod() with an explicit mip level of 0 (put the explicit mip level 0 in the 4th component of the uv param)
float outlineZOffsetMaskTexExplictMipLevel = 0;
float outlineZOffsetMask = tex2Dlod(_OutlineZOffsetMaskTex, float4(input.uv,0,outlineZOffsetMaskTexExplictMipLevel)).r; // we assume it is a black/white texture
// [Remap the ZOffset texture value]
// flip the sampled value so that the default black areas apply the ZOffset, because outline mask textures usually use this convention (black = hide outline)
outlineZOffsetMask = 1-outlineZOffsetMask;
outlineZOffsetMask = invLerpClamp(_OutlineZOffsetMaskRemapStart,_OutlineZOffsetMaskRemapEnd,outlineZOffsetMask);// allow the user to flip or remap the value
// [Apply ZOffset, Use remapped value as ZOffset mask]
output.positionCS = NiloGetNewClipPosWithZOffset(output.positionCS, _OutlineZOffset * outlineZOffsetMask + 0.03 * _IsFace);
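// (the extra 0.03 * _IsFace pushes face outline vertices a bit further away in view-space depth,
// so inner outlines on the face (e.g. across the nose) tend to be hidden by the face surface itself; see NiloZOffset.hlsl)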
#endif
// the ShadowCaster pass needs special processing of positionCS, otherwise shadow artifacts will appear
//--------------------------------------------------------------------------------------
#ifdef ToonShaderApplyShadowBiasFix
// see GetShadowPositionHClip() in URP/Shaders/ShadowCasterPass.hlsl
// https://github.com/Unity-Technologies/Graphics/blob/master/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl
float4 positionCS = TransformWorldToHClip(ApplyShadowBias(positionWS, output.normalWS, _LightDirection));
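// clamp z so the shadow bias never pushes the vertex beyond the near clip plane (same clamp as URP's ShadowCasterPass.hlsl)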
#if UNITY_REVERSED_Z
positionCS.z = min(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE);
#else
positionCS.z = max(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE);
#endif
output.positionCS = positionCS;
#endif
//--------------------------------------------------------------------------------------
return output;
}
///////////////////////////////////////////////////////////////////////////////////////
// fragment shared functions (Step1: prepare data structs for lighting calculation)
///////////////////////////////////////////////////////////////////////////////////////
half4 GetFinalBaseColor(Varyings input)
{
return tex2D(_BaseMap, input.uv) * _BaseColor;
}
half3 GetFinalEmissionColor(Varyings input)
{
half3 result = 0;
if(_UseEmission)
{
result = tex2D(_EmissionMap, input.uv).rgb * _EmissionMapChannelMask * _EmissionColor.rgb;
}
return result;
}
half GetFinalOcculsion(Varyings input)
{
half result = 1;
if(_UseOcclusion)
{
half4 texValue = tex2D(_OcclusionMap, input.uv);
half occlusionValue = dot(texValue, _OcclusionMapChannelMask);
occlusionValue = lerp(1, occlusionValue, _OcclusionStrength);
occlusionValue = invLerpClamp(_OcclusionRemapStart, _OcclusionRemapEnd, occlusionValue);
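// e.g. with _OcclusionRemapStart = 0.5 and _OcclusionRemapEnd = 1.0, a sampled occlusion of 0.75 remaps to 0.5
// (assuming invLerpClamp() is a saturated inverse lerp, see NiloInvLerpRemap.hlsl)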
result = occlusionValue;
}
return result;
}
void DoClipTestToTargetAlphaValue(half alpha)
{
#if _UseAlphaClipping
clip(alpha - _Cutoff);
#endif
}
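// (_UseAlphaClipping is expected to be declared as a shader_feature/multi_compile keyword in the .shader file)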
ToonSurfaceData InitializeSurfaceData(Varyings input)
{
ToonSurfaceData output;
// albedo & alpha
float4 baseColorFinal = GetFinalBaseColor(input);
output.albedo = baseColorFinal.rgb;
output.alpha = baseColorFinal.a;
DoClipTestToTargetAlphaValue(output.alpha);// early exit if possible
// emission
output.emission = GetFinalEmissionColor(input);
// occlusion
output.occlusion = GetFinalOcculsion(input);
return output;
}
ToonLightingData InitializeLightingData(Varyings input)
{
ToonLightingData lightingData;
lightingData.positionWS = input.positionWSAndFogFactor.xyz;
lightingData.viewDirectionWS = SafeNormalize(GetCameraPositionWS() - lightingData.positionWS);
lightingData.normalWS = normalize(input.normalWS); // the interpolated normal is NOT a unit vector; we need to normalize it
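// note: lightingData.shadowCoord is not filled here; the shadow coord is computed per light inside ShadeAllLights()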
return lightingData;
}
///////////////////////////////////////////////////////////////////////////////////////
// fragment shared functions (Step2: calculate lighting & final color)
///////////////////////////////////////////////////////////////////////////////////////
// all lighting equations are written inside the following .hlsl;
// editing that .hlsl alone controls most of the visual result.
#include "SimpleURPToonLitOutlineExample_LightingEquation.hlsl"
// this function contains no lighting math itself; it just gathers data and passes lighting results around.
// the only extra job done here is applying the positionWS offset used for the shadow mapping depth test
half3 ShadeAllLights(ToonSurfaceData surfaceData, ToonLightingData lightingData)
{
// Indirect lighting
half3 indirectResult = ShadeGI(surfaceData, lightingData);
//////////////////////////////////////////////////////////////////////////////////
// Light struct is provided by URP to abstract light shader variables.
// It contains light's
// - direction
// - color
// - distanceAttenuation
// - shadowAttenuation
//
// URP takes different shading approaches depending on the light and platform.
// You should never reference light shader variables directly in your shader; instead use the
// - GetMainLight()
// - GetAdditionalLight()
// functions to fill this Light struct.
//////////////////////////////////////////////////////////////////////////////////
//==============================================================================================
// The main light is the brightest directional light.
// It is shaded outside the light loop and has a specific set of variables and shading path,
// so we can be as fast as possible in the common case of a single directional light.
// You can optionally pass a shadowCoord; if you do, shadowAttenuation will be computed.
Light mainLight = GetMainLight();
float3 shadowTestPosWS = lightingData.positionWS + mainLight.direction * (_ReceiveShadowMappingPosOffset + _IsFace);
#ifdef _MAIN_LIGHT_SHADOWS
// compute the shadow coords in the fragment shader now, due to this change:
// https://forum.unity.com/threads/shadow-cascades-weird-since-7-2-0.828453/#post-5516425
// _ReceiveShadowMappingPosOffset controls the offset applied to the shadow comparison position;
// this is usually done to hide ugly self-shadowing on shadow-sensitive areas like the face
float4 shadowCoord = TransformWorldToShadowCoord(shadowTestPosWS);
mainLight.shadowAttenuation = MainLightRealtimeShadow(shadowCoord);
#endif
// Main light
half3 mainLightResult = ShadeSingleLight(surfaceData, lightingData, mainLight, false);
//==============================================================================================
// All additional lights
half3 additionalLightSumResult = 0;
#ifdef _ADDITIONAL_LIGHTS
// Returns the number of lights affecting the object being rendered.
// These lights are culled per object in the forward renderer of URP.
int additionalLightsCount = GetAdditionalLightsCount();
for (int i = 0; i < additionalLightsCount; ++i)
{
// Similar to GetMainLight(), but it takes a for-loop index. This figures out the
// per-object light index and samples the light buffer accordingly to initialize the
// Light struct. If ADDITIONAL_LIGHT_CALCULATE_SHADOWS is defined it will also compute shadows.
int perObjectLightIndex = GetPerObjectLightIndex(i);
Light light = GetAdditionalPerObjectLight(perObjectLightIndex, lightingData.positionWS); // use original positionWS for lighting
light.shadowAttenuation = AdditionalLightRealtimeShadow(perObjectLightIndex, shadowTestPosWS); // use the offset positionWS for the shadow test
// Different function used to shade additional lights.
additionalLightSumResult += ShadeSingleLight(surfaceData, lightingData, light, true);
}
#endif
//==============================================================================================
// emission
half3 emissionResult = ShadeEmission(surfaceData, lightingData);
return CompositeAllLightResults(indirectResult, mainLightResult, additionalLightSumResult, emissionResult, surfaceData, lightingData);
}
half3 ConvertSurfaceColorToOutlineColor(half3 originalSurfaceColor)
{
return originalSurfaceColor * _OutlineColor;
}
half3 ApplyFog(half3 color, Varyings input)
{
half fogFactor = input.positionWSAndFogFactor.w;
// Mix the pixel color with fogColor. You can optionally use MixFogColor to override the fogColor
// with a custom one.
color = MixFog(color, fogFactor);
return color;
}
// only the .shader file will call this function by
// #pragma fragment ShadeFinalColor
half4 ShadeFinalColor(Varyings input) : SV_TARGET
{
// to support GPU instancing and Single Pass Stereo rendering (VR), add the following section
//------------------------------------------------------------------------------------------------------------------------------
UNITY_SETUP_INSTANCE_ID(input); // in non OpenGL and non PSSL, MACRO will turn into -> UnitySetupInstanceID(input.instanceID);
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); // in non OpenGL and non PSSL, MACRO will turn into -> unity_StereoEyeIndex = input.stereoTargetEyeIndexAsRTArrayIdx;
//------------------------------------------------------------------------------------------------------------------------------
//////////////////////////////////////////////////////////////////////////////////////////
// first prepare all data for lighting function
//////////////////////////////////////////////////////////////////////////////////////////
// fill in the ToonSurfaceData struct:
ToonSurfaceData surfaceData = InitializeSurfaceData(input);
// fill in the ToonLightingData struct:
ToonLightingData lightingData = InitializeLightingData(input);
// apply all lighting calculation
half3 color = ShadeAllLights(surfaceData, lightingData);
#ifdef ToonShaderIsOutline
color = ConvertSurfaceColorToOutlineColor(color);
#endif
color = ApplyFog(color, input);
return half4(color, surfaceData.alpha);
}
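// for reference, the .shader file is expected to wire these entry points up in each pass, roughly like this
// (illustrative sketch only):
//   HLSLPROGRAM
//   #pragma vertex VertexShaderWork
//   #pragma fragment ShadeFinalColor
//   #include "SimpleURPToonLitOutlineExample_Shared.hlsl"
//   ENDHLSL
// with the outline pass additionally defining ToonShaderIsOutline, and the ShadowCaster pass defining ToonShaderApplyShadowBiasFix, before the #include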
//////////////////////////////////////////////////////////////////////////////////////////
// fragment shared functions (used only by the ShadowCaster, DepthOnly and DepthNormals passes)
//////////////////////////////////////////////////////////////////////////////////////////
// copy and edit of ShadowCasterPass.hlsl
void AlphaClipAndLODTest(Varyings input)
{
DoClipTestToTargetAlphaValue(GetFinalBaseColor(input).a);
#ifdef LOD_FADE_CROSSFADE
LODFadeCrossFade(input.positionCS);
#endif
}
// copy and edit of DepthOnlyPass.hlsl
half DepthOnlyFragment(Varyings input) : SV_TARGET
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
AlphaClipAndLODTest(input);
return input.positionCS.z;
}
// copy and edit of LitDepthNormalsPass.hlsl
void DepthNormalsFragment(
Varyings input
, out half4 outNormalWS : SV_Target0
#ifdef _WRITE_RENDERING_LAYERS
, out float4 outRenderingLayers : SV_Target1
#endif
)
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);
AlphaClipAndLODTest(input);
#if defined(_GBUFFER_NORMALS_OCT)
float3 normalWS = normalize(input.normalWS);
float2 octNormalWS = PackNormalOctQuadEncode(normalWS); // values between [-1, +1], must use fp32 on some platforms
float2 remappedOctNormalWS = saturate(octNormalWS * 0.5 + 0.5); // values between [ 0, 1]
half3 packedNormalWS = PackFloat2To888(remappedOctNormalWS); // values between [ 0, 1]
outNormalWS = half4(packedNormalWS, 0.0);
#else
float2 uv = input.uv;
#if defined(_PARALLAXMAP)
#if defined(REQUIRES_TANGENT_SPACE_VIEW_DIR_INTERPOLATOR)
half3 viewDirTS = input.viewDirTS;
#else
half3 viewDirTS = GetViewDirectionTangentSpace(input.tangentWS, input.normalWS, input.viewDirWS);
#endif
ApplyPerPixelDisplacement(viewDirTS, uv);
#endif
#if defined(_NORMALMAP) || defined(_DETAIL)
float sgn = input.tangentWS.w; // should be either +1 or -1
float3 bitangent = sgn * cross(input.normalWS.xyz, input.tangentWS.xyz);
float3 normalTS = SampleNormal(uv, TEXTURE2D_ARGS(_BumpMap, sampler_BumpMap), _BumpScale);
#if defined(_DETAIL)
half detailMask = SAMPLE_TEXTURE2D(_DetailMask, sampler_DetailMask, uv).a;
float2 detailUv = uv * _DetailAlbedoMap_ST.xy + _DetailAlbedoMap_ST.zw;
normalTS = ApplyDetailNormal(detailUv, normalTS, detailMask);
#endif
float3 normalWS = TransformTangentToWorld(normalTS, half3x3(input.tangentWS.xyz, bitangent.xyz, input.normalWS.xyz));
#else
float3 normalWS = input.normalWS;
#endif
outNormalWS = half4(NormalizeNormalPerPixel(normalWS), 0.0);
#endif
#ifdef _WRITE_RENDERING_LAYERS
uint renderingLayers = GetMeshRenderingLayer();
outRenderingLayers = float4(EncodeMeshRenderingLayer(renderingLayers), 0, 0, 0);
#endif
}