Unity3D Self-Study Miscellany, v3.0 (continuously updated)


1. Unity: drag an object with the mouse to spin it at any angle

The main functions involved:

Input.GetAxis("Mouse X") returns the mouse movement delta along the horizontal (x) axis.

Input.GetAxis("Mouse Y") returns the mouse movement delta along the vertical (y) axis.

The drag length is computed with the Pythagorean theorem; the longer the drag, the faster the rotation.

Input axes can be configured under Project Settings > Input (the script below uses custom axes named "moveX" and "moveY" defined there).

The code below makes it clear:


using UnityEngine;
using System.Collections;

public class startRoate : MonoBehaviour
{
    private bool onDrag = false;  // is the object currently being dragged?
    public float speed = 6f;      // rotation speed
    private float tempSpeed;      // damped speed
    private float axisX = 1;      // mouse movement delta along the horizontal axis
    private float axisY = 1;      // mouse movement delta along the vertical axis
    private float cXY;

    void OnMouseDown()
    {
        // receive the mouse-down event
        axisX = 0f;
        axisY = 0f;
    }

    void OnMouseDrag()  // called while the mouse is dragging
    {
        onDrag = true;
        axisX = -Input.GetAxis("moveX");  // get the mouse deltas (custom axes from Project Settings > Input)
        axisY = Input.GetAxis("moveY");
        cXY = Mathf.Sqrt(axisX * axisX + axisY * axisY);  // length of the mouse movement
        if (cXY == 0f) { cXY = 1f; }
    }

    float Rigid()  // compute the damped speed
    {
        if (onDrag)
        {
            tempSpeed = speed;
        }
        else
        {
            if (tempSpeed > 0)
            {
                // dividing by the drag length makes the speed of a longer drag decay more slowly
                tempSpeed -= speed * 2 * Time.deltaTime / cXY;
            }
            else
            {
                tempSpeed = 0;
            }
        }
        return tempSpeed;
    }

    void Update()
    {
        // this.transform.Rotate(new Vector3(axisY, axisX, 0) * Rigid(), Space.World); // this variant keeps rotating slowly in the previous direction
        if (!Input.GetMouseButton(0))
        {
            onDrag = false;
            this.transform.Rotate(new Vector3(axisY, axisX, 0) * 0.5f, Space.World);
        }
    }
}


2. A complete shader that accepts every kind of light source, with specular reflection built in

https://en.wikibooks.org/wiki/Cg_Programming/Unity/Cookies

One known issue: it casts shadows but does not receive them.


Shader "Cg per-pixel lighting with cookies" {
   Properties {
      _Color ("Diffuse Material Color", Color) = (1,1,1,1)
      _SpecColor ("Specular Material Color", Color) = (1,1,1,1)
      _Shininess ("Shininess", Float) = 10
   }
   SubShader {
      Pass {
         Tags { "LightMode" = "ForwardBase" } // pass for ambient light
            // and first directional light source without cookie

         CGPROGRAM

         #pragma vertex vert
         #pragma fragment frag

         #include "UnityCG.cginc"
         uniform float4 _LightColor0;
            // color of light source (from "Lighting.cginc")

         // User-specified properties
         uniform float4 _Color;
         uniform float4 _SpecColor;
         uniform float _Shininess;

         struct vertexInput {
            float4 vertex : POSITION;
            float3 normal : NORMAL;
         };
         struct vertexOutput {
            float4 pos : SV_POSITION;
            float4 posWorld : TEXCOORD0;
            float3 normalDir : TEXCOORD1;
         };

         vertexOutput vert(vertexInput input)
         {
            vertexOutput output;

            float4x4 modelMatrix = _Object2World;
            float4x4 modelMatrixInverse = _World2Object;

            output.posWorld = mul(modelMatrix, input.vertex);
            output.normalDir = normalize(
               mul(float4(input.normal, 0.0), modelMatrixInverse).xyz);
            output.pos = mul(UNITY_MATRIX_MVP, input.vertex);
            return output;
         }

         float4 frag(vertexOutput input) : COLOR
         {
            float3 normalDirection = normalize(input.normalDir);

            float3 viewDirection = normalize(
               _WorldSpaceCameraPos - input.posWorld.xyz);
            float3 lightDirection =
               normalize(_WorldSpaceLightPos0.xyz);

            float3 ambientLighting =
               UNITY_LIGHTMODEL_AMBIENT.rgb * _Color.rgb;

            float3 diffuseReflection =
               _LightColor0.rgb * _Color.rgb
               * max(0.0, dot(normalDirection, lightDirection));

            float3 specularReflection;
            if (dot(normalDirection, lightDirection) < 0.0)
               // light source on the wrong side?
            {
               specularReflection = float3(0.0, 0.0, 0.0);
                  // no specular reflection
            }
            else // light source on the right side
            {
               specularReflection = _LightColor0.rgb
                  * _SpecColor.rgb * pow(max(0.0, dot(
                  reflect(-lightDirection, normalDirection),
                  viewDirection)), _Shininess);
            }

            return float4(ambientLighting + diffuseReflection
               + specularReflection, 1.0);
         }

         ENDCG
      }

      Pass {
         Tags { "LightMode" = "ForwardAdd" }
            // pass for additional light sources
         Blend One One // additive blending

         CGPROGRAM

         #pragma multi_compile_lightpass

         #pragma vertex vert
         #pragma fragment frag

         #include "UnityCG.cginc"
         uniform float4 _LightColor0;
            // color of light source (from "Lighting.cginc")
         uniform float4x4 _LightMatrix0; // transformation
            // from world to light space (from Autolight.cginc)
         #if defined (DIRECTIONAL_COOKIE) || defined (SPOT)
            uniform sampler2D _LightTexture0;
               // cookie alpha texture map (from Autolight.cginc)
         #elif defined (POINT_COOKIE)
            uniform samplerCUBE _LightTexture0;
               // cookie alpha texture map (from Autolight.cginc)
         #endif

         // User-specified properties
         uniform float4 _Color;
         uniform float4 _SpecColor;
         uniform float _Shininess;

         struct vertexInput {
            float4 vertex : POSITION;
            float3 normal : NORMAL;
         };
         struct vertexOutput {
            float4 pos : SV_POSITION;
            float4 posWorld : TEXCOORD0;
               // position of the vertex (and fragment) in world space
            float4 posLight : TEXCOORD1;
               // position of the vertex (and fragment) in light space
            float3 normalDir : TEXCOORD2;
               // surface normal vector in world space
         };

         vertexOutput vert(vertexInput input)
         {
            vertexOutput output;

            float4x4 modelMatrix = _Object2World;
            float4x4 modelMatrixInverse = _World2Object;

            output.posWorld = mul(modelMatrix, input.vertex);
            output.posLight = mul(_LightMatrix0, output.posWorld);
            output.normalDir = normalize(
               mul(float4(input.normal, 0.0), modelMatrixInverse).xyz);
            output.pos = mul(UNITY_MATRIX_MVP, input.vertex);
            return output;
         }

         float4 frag(vertexOutput input) : COLOR
         {
            float3 normalDirection = normalize(input.normalDir);

            float3 viewDirection = normalize(
               _WorldSpaceCameraPos - input.posWorld.xyz);
            float3 lightDirection;
            float attenuation = 1.0;
               // by default no attenuation with distance

            #if defined (DIRECTIONAL) || defined (DIRECTIONAL_COOKIE)
               lightDirection = normalize(_WorldSpaceLightPos0.xyz);
            #elif defined (POINT_NOATT)
               lightDirection = normalize(
                  _WorldSpaceLightPos0 - input.posWorld.xyz);
            #elif defined(POINT)||defined(POINT_COOKIE)||defined(SPOT)
               float3 vertexToLightSource =
                  _WorldSpaceLightPos0.xyz - input.posWorld.xyz;
               float distance = length(vertexToLightSource);
               attenuation = 1.0 / distance; // linear attenuation
               lightDirection = normalize(vertexToLightSource);
            #endif

            float3 diffuseReflection =
               attenuation * _LightColor0.rgb * _Color.rgb
               * max(0.0, dot(normalDirection, lightDirection));

            float3 specularReflection;
            if (dot(normalDirection, lightDirection) < 0.0)
               // light source on the wrong side?
            {
               specularReflection = float3(0.0, 0.0, 0.0);
                  // no specular reflection
            }
            else // light source on the right side
            {
               specularReflection = attenuation * _LightColor0.rgb
                  * _SpecColor.rgb * pow(max(0.0, dot(
                  reflect(-lightDirection, normalDirection),
                  viewDirection)), _Shininess);
            }

            float cookieAttenuation = 1.0;
               // by default no cookie attenuation
            #if defined (DIRECTIONAL_COOKIE)
               cookieAttenuation = tex2D(_LightTexture0,
                  input.posLight.xy).a;
            #elif defined (POINT_COOKIE)
               cookieAttenuation = texCUBE(_LightTexture0,
                  input.posLight.xyz).a;
            #elif defined (SPOT)
               cookieAttenuation = tex2D(_LightTexture0,
                  input.posLight.xy / input.posLight.w
                  + float2(0.5, 0.5)).a;
            #endif

            return float4(cookieAttenuation
               * (diffuseReflection + specularReflection), 1.0);
         }

         ENDCG
      }
   }
   Fallback "Specular"
}


3. Rotating texture UVs in a shader

Rotating a texture can sometimes produce different effects and extra detail; for rocks, for example, UV rotation brings more visual variation.


http://forum.unity3d.com/threads/rotation-of-texture-uvs-directly-from-a-shader.150482/


Shader "Custom/RotateUVs" {
    Properties {
        _MainTex ("Base (RGB)", 2D) = "white" {}
        _RotationSpeed ("Rotation Speed", Float) = 2.0
    }
    SubShader {
        Tags { "RenderType"="Opaque" }
        LOD 200

        CGPROGRAM
        #pragma surface surf Lambert vertex:vert

        sampler2D _MainTex;

        struct Input {
            float2 uv_MainTex;
        };

        float _RotationSpeed;

        void vert (inout appdata_full v) {
            // _Time.y is the time in seconds (the original post used _Time,
            // which silently truncates to _Time.x, i.e. time / 20)
            float sinX = sin ( _RotationSpeed * _Time.y );
            float cosX = cos ( _RotationSpeed * _Time.y );
            float sinY = sin ( _RotationSpeed * _Time.y );
            float2x2 rotationMatrix = float2x2( cosX, -sinX, sinY, cosX);
            v.texcoord.xy = mul ( v.texcoord.xy, rotationMatrix );
        }

        void surf (Input IN, inout SurfaceOutput o) {
            half4 c = tex2D (_MainTex, IN.uv_MainTex);
            o.Albedo = c.rgb;
            o.Alpha = c.a;
        }
        ENDCG
    }
    FallBack "Diffuse"
}


http://forum.unity3d.com/threads/rotating-multiple-textures-in-a-shader.368457/


For Unity there are basically two different ways to do shaders (well, 4, but one is effectively deprecated, and the other is only for platform specific stuff): surface shaders and vert / frag shaders. Behind the scenes Unity converts surface shaders into expanded vert / frag shaders, which then get converted into platform specific shaders, which then get compiled into the final shader code that is sent to the drivers, which convert that into the final form the GPU actually uses. It's a deep rabbit hole, so we'll stick to the top two levels. The deprecated method is the "fixed function" shaders, if you're curious. This is what shaders used to look like before you could do arbitrary math, but these too now just get converted into vert / frag shaders.

So, let's start with vert / frag for now. The basic rotation is done in the vertex shader.

Shader "Unlit/Unlit UV Rotation in vertex"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _Rotation ("Rotation", Range(0,360)) = 0.0
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                UNITY_FOG_COORDS(1)
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float4 _MainTex_ST;
            float _Rotation;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);

                // rotating UV
                const float Deg2Rad = (UNITY_PI * 2.0) / 360.0;

                float rotationRadians = _Rotation * Deg2Rad; // convert degrees to radians
                float s = sin(rotationRadians); // sin and cos take radians, not degrees
                float c = cos(rotationRadians);

                float2x2 rotationMatrix = float2x2( c, -s, s, c); // construct simple rotation matrix

                v.uv -= 0.5; // offset UV so we rotate around 0.5 and not 0.0
                v.uv = mul(rotationMatrix, v.uv); // apply rotation matrix
                v.uv += 0.5; // offset UV again so UVs are in the correct location

                o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                UNITY_TRANSFER_FOG(o,o.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // sample the texture
                fixed4 col = tex2D(_MainTex, i.uv);
                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);

                return col;
            }
            ENDCG
        }
    }
}



This is the default "new Shader > Unlit" modified to add rotation. This is probably where you're at now, or at least something similar. The form is a little different from most of the other threads on UV rotation, because most people get confused by the degree-to-radian conversion (i.e. they don't do it) or do the mul in the wrong order.

So, now we want multiple textures and UV sets with different rotations. This is just a matter of adding additional UVs to the v2f struct and doing the math multiple times.

Shader "Unlit/Unlit UV Rotation of multiple textures in vertex"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _RotatedTexA ("Texture", 2D) = "white" {}
        _RotationA ("Rotation", Range(0,360)) = 0.0
        _RotatedTexB ("Texture", 2D) = "white" {}
        _RotationB ("Rotation", Range(0,360)) = 0.0
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog

            #include "UnityCG.cginc"

            float2 rotateUV(float2 uv, float degrees)
            {
                // rotating UV
                const float Deg2Rad = (UNITY_PI * 2.0) / 360.0;

                float rotationRadians = degrees * Deg2Rad; // convert degrees to radians
                float s = sin(rotationRadians); // sin and cos take radians, not degrees
                float c = cos(rotationRadians);

                float2x2 rotationMatrix = float2x2( c, -s, s, c); // construct simple rotation matrix

                uv -= 0.5; // offset UV so we rotate around 0.5 and not 0.0
                uv = mul(rotationMatrix, uv); // apply rotation matrix
                uv += 0.5; // offset UV again so UVs are in the correct location

                return uv;
            }

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 uv2 : TEXCOORD1; // additional UV sets to pass along
                UNITY_FOG_COORDS(2) // changed from 1 to 2 since uv2 is using TEXCOORD1 now
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float4 _MainTex_ST;
            sampler2D _RotatedTexA;
            float4 _RotatedTexA_ST;
            float _RotationA;
            sampler2D _RotatedTexB;
            float4 _RotatedTexB_ST;
            float _RotationB;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);

                o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                o.uv2.xy = TRANSFORM_TEX(rotateUV(v.uv, _RotationA), _RotatedTexA);
                o.uv2.zw = TRANSFORM_TEX(rotateUV(v.uv, _RotationB), _RotatedTexB);
                UNITY_TRANSFER_FOG(o,o.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // sample the texture
                fixed4 col = tex2D(_MainTex, i.uv);

                // sample rotated textures
                fixed4 colA = tex2D(_RotatedTexA, i.uv2.xy);
                fixed4 colB = tex2D(_RotatedTexB, i.uv2.zw);

                // adding the textures together just so you can see them all
                col = (col + colA + colB) / 3.0;

                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);

                return col;
            }
            ENDCG
        }
    }
}


The rotation code is now a separate function so we can reuse it. We've also added a second UV set to the v2f struct, a float4 instead of a float2, using its xy and zw components to pack two UV sets into a single interpolator for efficiency.

Now what about doing the rotation in the fragment shader?


Shader "Unlit/Unlit UV Rotation of multiple textures in fragment"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _RotatedTexA ("Texture", 2D) = "white" {}
        _RotationA ("Rotation", Range(0,360)) = 0.0
        _RotatedTexB ("Texture", 2D) = "white" {}
        _RotationB ("Rotation", Range(0,360)) = 0.0
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog

            #include "UnityCG.cginc"

            float2 rotateUV(float2 uv, float degrees)
            {
                // rotating UV
                const float Deg2Rad = (UNITY_PI * 2.0) / 360.0;

                float rotationRadians = degrees * Deg2Rad; // convert degrees to radians
                float s = sin(rotationRadians); // sin and cos take radians, not degrees
                float c = cos(rotationRadians);

                float2x2 rotationMatrix = float2x2( c, -s, s, c); // construct simple rotation matrix

                uv -= 0.5; // offset UV so we rotate around 0.5 and not 0.0
                uv = mul(rotationMatrix, uv); // apply rotation matrix
                uv += 0.5; // offset UV again so UVs are in the correct location

                return uv;
            }

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                UNITY_FOG_COORDS(1)
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float4 _MainTex_ST;
            sampler2D _RotatedTexA;
            float4 _RotatedTexA_ST;
            float _RotationA;
            sampler2D _RotatedTexB;
            float4 _RotatedTexB_ST;
            float _RotationB;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);

                o.uv = v.uv;
                UNITY_TRANSFER_FOG(o,o.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // sample the texture
                float2 mainTex_uv = TRANSFORM_TEX(i.uv, _MainTex);
                fixed4 col = tex2D(_MainTex, mainTex_uv);

                // sample rotated textures
                float2 uvA = TRANSFORM_TEX(rotateUV(i.uv, _RotationA), _RotatedTexA);
                float2 uvB = TRANSFORM_TEX(rotateUV(i.uv, _RotationB), _RotatedTexB);
                fixed4 colA = tex2D(_RotatedTexA, uvA);
                fixed4 colB = tex2D(_RotatedTexB, uvB);

                // adding the textures together just so you can see them all
                col = (col + colA + colB) / 3.0;

                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);
                return col;
            }
            ENDCG
        }
    }
}




All of the UV code is now in the fragment shader, almost literally copy-pasted (apart from changing v.uv to i.uv and assigning to different variables). In the vert function we're just passing the mesh's UV along and nothing else. The last two shaders are functionally identical, though the vertex version will generally be faster, as the math is done once per vertex instead of once per pixel.

Now you can mix these two approaches. Do the TRANSFORM_TEX in the vertex shader for the main tex (this applies the scale and offset values you see in the editor, which are stored in the float4 _MainTex_ST variable), pass along the untransformed UVs as the zw components, then do the rotation in the fragment shader. If you don't need or want the in-editor scale and offset, you can add [NoScaleOffset] in front of the texture properties at the start of the shader, then remove the float4 _***_ST and TRANSFORM_TEX stuff for that texture. You can also just share those offsets, etc. Whatever you want to do. But I'll leave that for you to figure out.

Now, if you want to do this in a surface shader, you can do it like the fragment version pretty easily: just copy the rotateUV function and apply the rotation in the surf function. If you want to do it at the vertex level, you'll need to add a custom vertex function to your surface shader and add another UV set to the Input struct.

Shader "Custom/Surface UV Rotation in vertex" {
    Properties {
        _Color ("Color", Color) = (1,1,1,1)
        _MainTex ("Albedo (RGB)", 2D) = "white" {}
        _Glossiness ("Smoothness", Range(0,1)) = 0.5
        _Metallic ("Metallic", Range(0,1)) = 0.0
        [NoScaleOffset] _RotatedTex ("Texture", 2D) = "white" {}
        _Rotation ("Rotation", Range(0,360)) = 0.0
    }
    SubShader {
        Tags { "RenderType"="Opaque" }
        LOD 200

        CGPROGRAM
        // Physically based Standard lighting model, and enable shadows on all light types
        #pragma surface surf Standard fullforwardshadows vertex:vert

        // Use shader model 3.0 target, to get nicer looking lighting
        #pragma target 3.0

        float2 rotateUV(float2 uv, float degrees)
        {
            // rotating UV
            const float Deg2Rad = (UNITY_PI * 2.0) / 360.0;

            float rotationRadians = degrees * Deg2Rad; // convert degrees to radians
            float s = sin(rotationRadians); // sin and cos take radians, not degrees
            float c = cos(rotationRadians);

            float2x2 rotationMatrix = float2x2( c, -s, s, c); // construct simple rotation matrix

            uv -= 0.5; // offset UV so we rotate around 0.5 and not 0.0
            uv = mul(rotationMatrix, uv); // apply rotation matrix
            uv += 0.5; // offset UV again so UVs are in the correct location

            return uv;
        }

        sampler2D _MainTex;
        sampler2D _RotatedTex;

        struct Input {
            float2 uv_MainTex;
            float2 rotatedUV;
        };

        half _Glossiness;
        half _Metallic;
        fixed4 _Color;
        float _Rotation;

        void vert (inout appdata_full v, out Input o) {
            UNITY_INITIALIZE_OUTPUT(Input,o);
            o.rotatedUV = rotateUV(v.texcoord.xy, _Rotation); // .xy added: texcoord is a float4
        }

        void surf (Input IN, inout SurfaceOutputStandard o) {
            // Albedo comes from a texture tinted by color
            fixed4 c = tex2D (_MainTex, IN.uv_MainTex) * _Color;

            // rotated texture
            fixed4 c2 = tex2D(_RotatedTex, IN.rotatedUV);

            // blend the two together so we can see them
            c = (c + c2) / 2.0;

            o.Albedo = c.rgb;
            // Metallic and smoothness come from slider variables
            o.Metallic = _Metallic;
            o.Smoothness = _Glossiness;
            o.Alpha = c.a;
        }
        ENDCG
    }
    FallBack "Diffuse"
}



I also disabled the in-editor texture scaling and offset for the rotated texture, just because that adds another layer of weirdness in surface shaders.



4. Unity3D halo glow

Put the Particles/Additive shader on a plane; the texture can be a circle that is solid in the center and fades out at the edges, with alpha controlling the brightness.
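As a minimal C# sketch of this tip (Particles/Additive is the built-in legacy shader; the glow texture field is an assumption to be assigned in the Inspector):

using UnityEngine;

public class HaloQuad : MonoBehaviour
{
    // A circle texture, solid in the center and fading at the edges (assumed asset).
    public Texture glowTexture;

    void Start()
    {
        // Create a quad and give it an additive particle material.
        GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
        Material mat = new Material(Shader.Find("Particles/Additive"));
        mat.mainTexture = glowTexture;  // the texture's alpha drives the brightness
        quad.GetComponent<Renderer>().material = mat;
    }
}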

5. Batch-creating prefabs in Unity

http://blog.sina.com.cn/s/blog_697b1b8c0101eg9f.html

Sometimes a whole batch of scene objects needs to be turned into prefabs, but Unity only lets you create them one at a time by hand, which is clumsy. The editor-class method below solves the problem.

static Object CreatePrefab(GameObject go, string name)
{
    // First create an empty prefab.
    // The path where the prefab is saved in the project; change as needed ("Assets/" + name + ".prefab").
    Object tempPrefab = EditorUtility.CreateEmptyPrefab("Assets/" + name + ".prefab");
    // Then replace the empty prefab with our scene object.
    tempPrefab = EditorUtility.ReplacePrefab(go, tempPrefab);
    // Return the created prefab.
    return tempPrefab;
}

This method can be adapted to any rule you like. For example, you can iterate over all children of an object and turn every one of them into a prefab saved in your project:

[MenuItem("Tools/BatchPrefab All Children")]
public static void BatchPrefab(){
    Transform tParent = ((GameObject)Selection.activeObject).transform;

    Object tempPrefab;
    int i = 0;
    foreach(Transform t in tParent){
        tempPrefab = EditorUtility.CreateEmptyPrefab("Assets/Prefab/prefab" + i + ".prefab");
        tempPrefab = EditorUtility.ReplacePrefab(t.gameObject, tempPrefab);
        i++;
    }
}

The code above adds a Tools/BatchPrefab All Children menu item to Unity (batch-create prefabs for all children). It takes the object currently selected in the scene, iterates over all of its children, and creates a prefab for each one, saved under the project's Assets/Prefab directory.


6. Converting between Gamma and Linear color space in Unity

Shader "Custom/test" {
    Properties {
        _MainTex ("Albedo (RGB)", 2D) = "green" {}
        _Test ("test", Float) = 2.2
        _Color ("C", Color) = (1.0, 1.0, 1.0, 1.0)
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }

        Pass
        {
            Name "FORWARD"
            Tags { "LightMode" = "ForwardBase" }

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #include "UnityCG.cginc"
            #include "Lighting.cginc"

            struct VS_OUTPUT
            {
                float4 pos : SV_POSITION;
                float2 uv  : TEXCOORD0;
            };

            uniform float _Test;

            float4 Gamma2Linear(float4 c)
            {
                return pow(c, _Test);
            }

            float4 Linear2Gamma(float4 c)
            {
                return pow(c, 1.0 / _Test);
            }

            VS_OUTPUT vert(appdata_tan i)
            {
                VS_OUTPUT o;
                o.pos = mul(UNITY_MATRIX_MVP, i.vertex);
                o.uv = i.texcoord.xy;

                return o;
            }

            uniform sampler _MainTex;

            float4 frag(VS_OUTPUT i): COLOR
            {
                float4 c = float4(i.uv.x, i.uv.x, i.uv.x, 1.0);

                return Linear2Gamma(c);
            }

            ENDCG
        }
    }
    FallBack "Diffuse"
}



vegenarie said: "i tried using pow(finalColor, 2.2) in the frag function but it doesnt work."

Yeah, don't do that. Automatic behind-the-scenes gamma correction is part of Linear space. Check that your input textures don't have "bypass sRGB" or "mips in linear" checked. No pow(c, 2.2) or pow(c, 1/2.2) calls anywhere anymore!

You probably tweaked and fine-tuned lots of materials to look "ok" in Gamma space, so now they initially seem unusually weird to you... untweak your project ;)

Like kebrus said, gamma space is an absolute no-no for games that have any kind of illumination. Learn to love Linear.
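If you do need the conversion on the script side, Unity exposes it as Mathf.GammaToLinearSpace / Mathf.LinearToGammaSpace, which use the exact sRGB curve rather than the plain 2.2 exponent approximated in the shader above. A minimal sketch:

using UnityEngine;

public class GammaLinearExample : MonoBehaviour
{
    void Start()
    {
        float gammaValue = 0.5f;
        // sRGB 0.5 maps to roughly 0.214 in linear space.
        float linearValue = Mathf.GammaToLinearSpace(gammaValue);
        Debug.Log(linearValue);
        // Round-trips back to ~0.5.
        Debug.Log(Mathf.LinearToGammaSpace(linearValue));
    }
}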

https://forum.unity3d.com/threads/about-gamma-correction.353987/

https://forum.unity3d.com/threads/problem-using-linear-space.253622/


7. Hue, saturation, brightness, contrast effects

http://gamedev.stackexchange.com/questions/28782/hue-saturation-brightness-contrast-effect-in-hlsl

http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl

https://community.unity.com/t5/Shaders/Saturation-Shader/td-p/1520622

http://www.clonefactor.com/wordpress/program/unity3d/1513/

https://forum.unity3d.com/threads/hue-saturation-brightness-contrast-shader.260649/
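The links above cover shader implementations. As a minimal C#-side sketch of the same idea, Unity's built-in Color.RGBToHSV / Color.HSVToRGB can apply hue and saturation adjustments to a color (the shift and scale values here are arbitrary examples):

using UnityEngine;

public class HsvAdjustExample : MonoBehaviour
{
    public Color input = Color.red;
    [Range(-0.5f, 0.5f)] public float hueShift = 0.1f;        // fraction of the hue wheel
    [Range(0f, 2f)]      public float saturationScale = 0.8f;

    void Start()
    {
        float h, s, v;
        Color.RGBToHSV(input, out h, out s, out v);
        h = Mathf.Repeat(h + hueShift, 1f);       // hue is stored in 0..1
        s = Mathf.Clamp01(s * saturationScale);
        Debug.Log(Color.HSVToRGB(h, s, v));
    }
}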



8. Determining whether a target is in front of/behind or left/right of an object in Unity3D


1. To determine whether the target is in front of or behind you:

   Vector3.Dot(transform.forward, target.position - transform.position)

   If the return value is positive, the target is in front of you; otherwise it is behind you. (Dot the forward vector with the direction to the target, not with target.position itself; the latter only works when you sit at the world origin.)

2. To determine whether the target is to your left or right:

   Vector3.Cross(transform.forward, target.position - transform.position).y

   If the return value is positive, the target is to your right; otherwise it is to your left.


3. While we're here, a quick review of the dot and cross products of spatial vectors:

A. Dot product
  The dot product is computed as a·b = |a|·|b|·cos<a,b>, where |a| and |b| are the magnitudes of the vectors and <a,b> is the angle between them. For the dot product the angle is unordered: <a,b> and <b,a> are the same.
  So the dot product actually lets us compute the angle between two vectors.
  It also gives a quick, rough test for whether the current object is facing another object: take the dot product of the current object's transform.forward with the direction toward otherObj.transform.position; if it is greater than 0 the other object is in front, otherwise it is behind.

B. Cross product
  Definition: c = a × b, where a, b, and c are all vectors; the cross product of two vectors is itself a vector!
  Property 1: c ⊥ a and c ⊥ b, i.e. c is perpendicular to the plane containing a and b.
  Property 2: the magnitude |c| = |a||b|·sin<a,b>.
  Property 3: it obeys the right-hand rule, so a×b ≠ b×a; in fact a×b = −b×a. We can therefore use the sign of the cross product to judge the relative position of a and b, i.e. whether b lies clockwise or counterclockwise from a.
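Putting both tests together, a minimal sketch (target is assumed to be assigned in the Inspector):

using UnityEngine;

public class RelativePosition : MonoBehaviour
{
    public Transform target;

    void Update()
    {
        // Use the direction from us to the target, not target.position itself,
        // so the tests also work when we are away from the world origin.
        Vector3 toTarget = target.position - transform.position;

        float front = Vector3.Dot(transform.forward, toTarget);
        Debug.Log(front > 0f ? "target is in front" : "target is behind");

        float side = Vector3.Cross(transform.forward, toTarget).y;
        Debug.Log(side > 0f ? "target is to the right" : "target is to the left");
    }
}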

9. Zooming in and out with the mouse scroll wheel in Unity3D

// cam is a reference to the camera being zoomed
if (Input.GetAxis("Mouse ScrollWheel") != 0)
{
    cam.transform.position += cam.transform.forward * Input.GetAxis("Mouse ScrollWheel");
}

10. Detecting which mouse button was pressed in NGUI (left/right, etc.)

Inside OnClick() and similar NGUI handlers, check the value of UICamera.currentTouchID:

  -1 is the left mouse button,
  -2 is the right mouse button,
  -3 is the middle button / scroll wheel.
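A minimal sketch of such a handler (assuming NGUI is in the project; OnClick() is invoked by NGUI's UICamera on widgets with a collider):

using UnityEngine;

public class ClickButtonLogger : MonoBehaviour
{
    // Called by NGUI's UICamera when this widget is clicked.
    void OnClick()
    {
        if (UICamera.currentTouchID == -1)
            Debug.Log("left button");
        else if (UICamera.currentTouchID == -2)
            Debug.Log("right button");
        else if (UICamera.currentTouchID == -3)
            Debug.Log("middle button / wheel");
    }
}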

11. GPU instancing

https://docs.unity3d.com/Manual/GPUInstancing.html

You can use GPU instancing to draw many identical objects with only a few draw calls. There are some restrictions that you need to bear in mind:

  • Your identical objects need to share the same Mesh and the same Material. You can, however, add per-instance data. See Adding per-instance data below for more information.
  • The MeshRenderer component and Graphics.DrawMesh API are supported.
  • GPU instancing is available on the following platforms:
    • Windows: DX11 and DX12 with SM 4.0 and above / OpenGL 4.1 and above
    • OS X and Linux: OpenGL 4.1 and above
    • Mobile: OpenGL ES 3.0 and above / Metal
    • PlayStation 4
    • Xbox One

Adding instancing to your objects

A Standard Surface Shader that supports instancing is available in the Unity Editor. Add one to your project by selecting Shader > Standard Surface Shader (Instanced).

(Image: Adding the Standard Instanced Shader)

Apply this Shader to your GameObject's Material. In your Material's Inspector window, click the Shader drop-down, roll over the Instanced field, and choose your instanced Shader from the list:

(Image: Assigning the Standard Instanced Shader to a Material)

Adding per-instance data

Even though the instanced GameObjects are sharing the same Mesh and Material, you can set Shader properties on a per-object basis using the MaterialPropertyBlock API. In the example below, each GameObject is assigned a random color value using the _Color property:

MaterialPropertyBlock props = new MaterialPropertyBlock();
MeshRenderer renderer;

foreach (GameObject obj in objects)
{
   float r = Random.Range(0.0f, 1.0f);
   float g = Random.Range(0.0f, 1.0f);
   float b = Random.Range(0.0f, 1.0f);
   props.SetColor("_Color", new Color(r, g, b));

   renderer = obj.GetComponent<MeshRenderer>();
   renderer.SetPropertyBlock(props);
}

Adding instancing to your own shaders

The following example takes a simple unlit Shader and makes it capable of instancing:

Shader "SimplestInstancedShader"
{
    Properties
    {
        _Color ("Color", Color) = (1, 1, 1, 1)
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #pragma multi_compile_instancing
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                UNITY_INSTANCE_ID
            };

            struct v2f
            {
                float4 vertex : SV_POSITION;
                UNITY_INSTANCE_ID
            };

            UNITY_INSTANCING_CBUFFER_START (MyProperties)
            UNITY_DEFINE_INSTANCED_PROP (float4, _Color)
            UNITY_INSTANCING_CBUFFER_END

            v2f vert (appdata v)
            {
                v2f o;
                UNITY_SETUP_INSTANCE_ID (v);
                UNITY_TRANSFER_INSTANCE_ID (v, o);
                o.vertex = UnityObjectToClipPos (v.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                UNITY_SETUP_INSTANCE_ID (i);
                return UNITY_ACCESS_INSTANCED_PROP (_Color);
            }
            ENDCG
        }
    }
}

Added code

  • #pragma multi_compile_instancing — generates a Shader with two variants: one with the built-in keyword INSTANCING_ON defined (allowing instancing), the other with nothing defined. This allows the Shader to fall back to a non-instanced version if instancing isn't supported on the GPU.
  • UNITY_INSTANCE_ID — used in the vertex Shader input/output structure to define an instance ID. See SV_InstanceID for more information.
  • UNITY_INSTANCING_CBUFFER_START(name) / UNITY_INSTANCING_CBUFFER_END — every per-instance property must be defined in a specially named constant buffer. Use this pair of macros to wrap the properties you want to be made unique to each instance.
  • UNITY_DEFINE_INSTANCED_PROP(float4, _Color) — defines a per-instance Shader property with a type and a name. In this example, the _Color property is unique to each instance.
  • UNITY_SETUP_INSTANCE_ID(v); — makes the instance ID accessible to Shader functions. It must be used at the very beginning of a vertex Shader, and is optional for fragment Shaders.
  • UNITY_TRANSFER_INSTANCE_ID(v, o); — copies the instance ID from the input structure to the output structure in the vertex Shader. This is only necessary if you need to access per-instance data in the fragment Shader.
  • UNITY_ACCESS_INSTANCED_PROP(_Color) — accesses a per-instance Shader property. It uses the instance ID to index into the instance data array.

Note: As long as Material properties are instanced, Renderers can always be rendered instanced, even if you put different instanced properties into different Renderers. Normal (non-instanced) properties cannot be batched, so do not put them in the MaterialPropertyBlock. Instead, create different Materials for them.

A note regarding UnityObjectToClipPos

UnityObjectToClipPos(v.vertex) is always preferred where mul(UNITY_MATRIX_MVP, v.vertex) would otherwise be used. While you can continue to use UNITY_MATRIX_MVP as normal in instanced Shaders, UnityObjectToClipPos is the most efficient way of transforming vertex positions from object space into clip space.

In instanced Shaders, UNITY_MATRIX_MVP (among other built-in matrices) is transparently modified to include an extra matrix multiply. Specifically, it is expanded to mul(UNITY_MATRIX_VP, unity_ObjectToWorld), and unity_ObjectToWorld is expanded to unity_ObjectToWorldArray[unity_InstanceID].

UnityObjectToClipPos is optimized to perform two matrix-vector multiplications simultaneously, and is therefore more efficient than performing the multiplication manually, because the Shader compiler does not automatically perform this optimization.

Modifying multi-pass Shaders to work with instancing

For vertex and fragment Shaders, Unity needs to change the way vertex transformations are calculated in multi-pass scenarios (for example, in the ForwardAdd pass) to avoid z-fighting artifacts against the base/first passes due to floating point errors in matrix calculation. To do this, add #pragma force_concat_matrix to the Shader.

Specifically, the vertex transformation in the ForwardAdd pass is calculated by multiplying the M (model) matrix with the VP (view and projection) matrix instead of using a CPU-precomputed MVP matrix.

This is not necessary for surface Shaders, because the correct calculation is automatically substituted.

Batching priority

Static batching takes priority over instancing. If a GameObject is marked for static batching and is successfully batched, instancing is disabled even if its Renderer uses an instancing Shader. When this happens, a warning box appears in the Inspector suggesting that the Static Batching flag be unchecked in the Player Settings.

Instancing takes priority over dynamic batching. If Meshes can be instanced, dynamic batching is disabled.

Further notes

  • Instanced draw calls appear in the Frame Debugger as Draw Mesh (instanced).
  • When writing or modifying your own Shaders, don't forget to instance shadows, too. For a surface Shader, use the addshadow option to force the generation of an instanced shadow pass.
  • You don’t have to define per-instance properties, but setting up an instance ID is mandatory, because world matrices need it to work correctly.
  • When using forward rendering, objects affected by multiple lights can’t be instanced efficiently. Only the base pass can make effective use of instancing, not the add passes.
  • Objects that use lightmaps, or are affected by different light or reflection probes, can’t be instanced.
  • If you have more than two passes for multi-pass Shaders, only the first passes can be instanced. This is because Unity forces the later passes to be rendered together for each object.
  • D3D constant buffers have a maximum size of 64KB. For OpenGL, it's usually 16KB. You will reach this limit if you try to define too many per-instance properties. The Shaders may fail to compile or, even worse, the Shader compiler might crash. To work around this, you have to balance between the size of the batch and the size of per-instance properties. Defining UNITY_MAX_INSTANCE_COUNT with an integer before including any .cginc file allows you to limit the maximum number of instances an instanced draw call can draw. This allows for more properties per instance in the instance constant buffer. You can achieve the same result when using a surface Shader with #pragma instancing_options maxcount:number. The default value of this max instance count is 500. For OpenGL, the actual value is one quarter of the value you specify, so 125 by default.
  • All the Shader macros used in the above example are defined in UnityInstancing.cginc. Find this file in [Unity folder]\Editor\Data\CGIncludes.

12. Texture arrays

https://docs.unity3d.com/Manual/SL-TextureArrays.html


Similar to regular 2D textures (Texture2D class, sampler2D in shaders), cube maps (Cubemap class, samplerCUBE in shaders), and 3D textures (Texture3D class, sampler3D in shaders), Unity also supports 2D texture arrays.

A texture array is a collection of same size/format/flags 2D textures that look like a single object to the GPU, and can be sampled in the shader with a texture element index. They are useful for implementing custom terrain rendering systems or other special effects where you need an efficient way of accessing many textures of the same size and format. Elements of a 2D texture array are also known as slices, or layers.

Platform Support

Texture arrays need to be supported by the underlying graphics API and the GPU. They are available on:

  • Direct3D 11/12 (Windows, Xbox One)
  • OpenGL Core (Mac OS X, Linux)
  • Metal (iOS, Mac OS X)
  • OpenGL ES 3.0 (Android, iOS, WebGL 2.0)
  • PlayStation 4

Other platforms do not support texture arrays (Direct3D 9, OpenGL ES 2.0 or WebGL 1.0). Use SystemInfo.supports2DArrayTextures to determine texture array support at runtime.

Creating and manipulating texture arrays

As there is no texture import pipeline for texture arrays, they must be created from within your scripts. Use the Texture2DArray class to create and manipulate them. Note that texture arrays can be serialized as assets, so it is possible to create and fill them with data from editor scripts.

Normally, texture arrays are used purely within GPU memory, but you can use Graphics.CopyTexture, Texture2DArray.GetPixels and Texture2DArray.SetPixels to transfer pixels to and from system memory.
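A minimal editor-script sketch of creating one (the source texture paths are hypothetical; both textures must share the same size and format and be Read/Write enabled so GetPixels works):

using UnityEngine;
using UnityEditor;

public class CreateTextureArrayExample
{
    [MenuItem("Tools/Create Texture Array")]
    static void Create()
    {
        Texture2D a = AssetDatabase.LoadAssetAtPath<Texture2D>("Assets/TexA.png");
        Texture2D b = AssetDatabase.LoadAssetAtPath<Texture2D>("Assets/TexB.png");

        // All slices share the same size, format and mip settings.
        Texture2DArray array = new Texture2DArray(a.width, a.height, 2,
            TextureFormat.RGBA32, true);
        array.SetPixels(a.GetPixels(), 0); // slice 0
        array.SetPixels(b.GetPixels(), 1); // slice 1
        array.Apply();

        // Texture arrays can be serialized as assets.
        AssetDatabase.CreateAsset(array, "Assets/MyTextureArray.asset");
    }
}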

Using texture arrays as render targets

Texture array elements may also be used as render targets. Use RenderTexture.dimension to specify in advance that the render target is to be a 2D texture array. The depthSlice argument to Graphics.SetRenderTarget specifies which array element to render to. On platforms that support "layered rendering" (i.e. geometry shaders), you can set the depthSlice argument to -1 to set the whole texture array as a render target. You can also use a geometry shader to render into individual elements.
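A minimal sketch of targeting a single slice (the sizes and slice index are arbitrary examples):

using UnityEngine;
using UnityEngine.Rendering;

public class RenderToArraySlice : MonoBehaviour
{
    void Start()
    {
        // Create a render texture configured as a 2D texture array with 4 slices.
        RenderTexture rt = new RenderTexture(256, 256, 16);
        rt.dimension = TextureDimension.Tex2DArray;
        rt.volumeDepth = 4;
        rt.Create();

        // Bind mip 0 of slice 2 as the render target and clear it
        // (use depthSlice -1 for layered rendering into all slices).
        Graphics.SetRenderTarget(rt, 0, CubemapFace.Unknown, 2);
        GL.Clear(true, true, Color.red);
    }
}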

Using texture arrays in shaders

Since texture arrays do not work on all platforms, shaders need to use an appropriate compilation target to access them. The minimum shader model compilation target that supports texture arrays is 3.5.

Use these macros to declare and sample texture arrays:

  • UNITY_DECLARE_TEX2DARRAY(name) declares a texture array sampler variable inside HLSL code.
  • UNITY_SAMPLE_TEX2DARRAY(name,uv) samples a texture array with a float3 UV; the z component of the coordinate is an array element index.
  • UNITY_SAMPLE_TEX2DARRAY_LOD(name,uv,lod) samples a texture array with an explicit mipmap level.

Examples

The following shader example samples a texture array using object space vertex positions as coordinates:

Shader "Example/Sample2DArrayTexture"
{
    Properties
    {
        _MyArr ("Tex", 2DArray) = "" {}
        _SliceRange ("Slices", Range(0,16)) = 6
        _UVScale ("UVScale", Float) = 1.0
    }
    SubShader
    {
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // to use texture arrays we need to target DX10/OpenGLES3 which
            // is shader model 3.5 minimum
            #pragma target 3.5

            #include "UnityCG.cginc"

            struct v2f
            {
                float3 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            float _SliceRange;
            float _UVScale;

            v2f vert (float4 vertex : POSITION)
            {
                v2f o;
                o.vertex = mul(UNITY_MATRIX_MVP, vertex);
                o.uv.xy = (vertex.xy + 0.5) * _UVScale;
                o.uv.z = (vertex.z + 0.5) * _SliceRange;
                return o;
            }

            UNITY_DECLARE_TEX2DARRAY(_MyArr);

            half4 frag (v2f i) : SV_Target
            {
                return UNITY_SAMPLE_TEX2DARRAY(_MyArr, i.uv);
            }
            ENDCG
        }
    }
}

13. Shader model correspondences (graphics emulation levels)

https://docs.unity3d.com/Manual/GraphicsEmulation.html

Shader Model 4 (Standalone & Windows Store platforms)

  • Emulates DirectX 10 feature set (PC GPUs made during 2007–2009).
  • Turns off support for compute Shaders and related features (compute buffers, random-write Textures), sparse Textures, and tessellation Shaders.

Shader Model 3 (Standalone platform)

  • Emulates DirectX 9 SM3.0 feature set (PC GPUs made during 2004–2006).
  • In addition to features turned off by Shader Model 4 emulation, this also turns off support for draw call instancing, Texture Arrays, and geometry Shaders. It enforces a maximum of 4 simultaneous render targets, and a maximum of 16 Textures used in a single Shader.

Shader Model 2 (Standalone platform)

  • Emulates DirectX 9 SM2.0 feature set (PC GPUs made during 2002–2004).
  • In addition to features turned off by Shader Model 3 emulation, this also turns off support for HDR rendering, Linear color space and depth Textures.

OpenGL ES 3.0 (Android platform)

  • Emulates mobile OpenGL ES 3.0 feature set.
  • Turns off support for compute Shaders and related features (compute buffers, random-write Textures), sparse Textures, tessellation Shaders and geometry Shaders. Enforces a maximum of 4 simultaneous render targets, and a maximum of 16 Textures used in a single Shader. Maximum allowed Texture size is set to 4096, and maximum cubemap size to 2048. Realtime soft shadows are disabled.

Metal (iOS, tvOS platforms)

  • Emulates mobile Metal feature set.
  • Same restrictions applied as GLES3.0 emulation, except that the maximum cubemap size is set to 4096.

OpenGL ES 2.0 (Android, iOS, tvOS, Tizen platforms)

  • Emulates mobile OpenGL ES 2.0 feature set.
  • In addition to features turned off by GLES3.0 emulation, this also turns off support for draw call instancing, Texture arrays, 3D Textures and multiple render targets. Enforces a maximum of 8 Textures used in a single Shader. Maximum allowed cubemap size is set to 1024.

WebGL 1 and WebGL 2 (WebGL platform)

  • Emulates typical WebGL graphics restrictions.
  • Very similar to GLES2.0 and GLES3.0 emulation levels above, except that supported Texture sizes are higher (8192 for regular Textures, 4096 for cubemaps), and 16 Textures are allowed in a single Shader.

Shader Model 2 - DX11 FL9.3 (Windows Store Platform)

  • Emulates typical Windows Phone graphics feature set.
  • Very similar to Shader Model 2 emulation, but also disables multiple render targets and separate alpha blending.

14. Unity shader property flags (MaterialProperty.PropFlags)

https://docs.unity3d.com/ScriptReference/MaterialProperty.PropFlags.html

Description

Flags that control how a MaterialProperty is displayed.

Variables

  • None — No flags are set.
  • HideInInspector — Do not show the property in the inspector.
  • PerRendererData — The texture value for this property will be queried from the renderer's MaterialPropertyBlock, instead of from the material. This corresponds to the "[PerRendererData]" attribute in front of a property in the shader code.
  • NoScaleOffset — Do not show UV scale/offset fields next to a texture.
  • Normal — Signifies that values of this property contain Normal (normalized vector) data.
  • HDR — Signifies that values of this property contain High Dynamic Range (HDR) data.
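These flags can be read from editor code via MaterialEditor.GetMaterialProperties; a minimal sketch that logs whether each property of the selected Material is hidden in the inspector:

using UnityEngine;
using UnityEditor;

public class PropFlagsExample
{
    [MenuItem("Tools/Log Property Flags")]
    static void LogFlags()
    {
        Material mat = Selection.activeObject as Material;
        if (mat == null) return;

        MaterialProperty[] props =
            MaterialEditor.GetMaterialProperties(new Object[] { mat });
        foreach (MaterialProperty p in props)
        {
            bool hidden =
                (p.flags & MaterialProperty.PropFlags.HideInInspector) != 0;
            Debug.Log(p.name + " hidden in inspector: " + hidden);
        }
    }
}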

15. Getting the local path of a texture or material in Unity (you can get the path of any asset this way)

https://docs.unity3d.com/ScriptReference/AssetDatabase.GetAssetPath.html


AssetDatabase.GetAssetPath

public static string GetAssetPath(int instanceID);
public static string GetAssetPath(Object assetObject);

Parameters

instanceID — The instance ID of the asset.
assetObject — A reference to the asset.

Returns

string — The asset path name, or null or an empty string if the asset does not exist.

Description

Returns the path name relative to the project folder where the asset is stored.

All paths are relative to the project folder, for example: "Assets/MyTextures/hello.png".

using UnityEngine;
using UnityEditor;

public class CreateMaterialExample : MonoBehaviour
{
    [MenuItem("GameObject/Create Material")]
    static void CreateMaterial()
    {
        // Create a simple material asset
        Material material = new Material(Shader.Find("Specular"));
        AssetDatabase.CreateAsset(material, "Assets/MyMaterial.mat");

        // Print the path of the created asset
        Debug.Log(AssetDatabase.GetAssetPath(material));
    }
}
For example, to print the path of the texture assigned to a material's _MainTex slot:

Debug.Log(AssetDatabase.GetAssetPath(this.GetComponent<Renderer>().material.GetTexture("_MainTex")));







