AR Children's Picture Book Technical Implementation: Real-Time Coloring


Compared with static coloring, real-time coloring is more advanced: you can preview the colored result while you are still drawing. The texturing principle is much the same as for static coloring, except that the model's texture is continuously replaced with whatever the camera currently sees. If we achieve this by repeatedly grabbing the screen as a texture, it consumes a lot of device resources and memory keeps climbing, so after a short while the phone starts to stutter. In Unity3D this can be solved with the camera's RenderTexture (render texture) support.

[Figure: real-time coloring effect]

The rough flow (a minimal skeleton tying the steps together follows this list):
1. Create a dedicated camera and generate a RenderTexture
2. Crop the RenderTexture
3. Assign the result to the material that needs to be colored
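
Before going through the individual steps, here is a minimal sketch of how the three parts might be wired together in one MonoBehaviour. The class and field names (RealtimeColoringController, renderTextureCamera, paintedRenderer, textureResolutionX/Y) are illustrative assumptions rather than names from the original project; sections a-f below give the actual snippets.

using UnityEngine;

// Illustrative skeleton only; the names here are assumptions, not from the original project.
public class RealtimeColoringController : MonoBehaviour
{
    public Camera renderTextureCamera;   // secondary camera parented under the AR camera (step 1 / section a)
    public Renderer paintedRenderer;     // renderer of the model being colored (section f)
    public int textureResolutionX = 1024;
    public int textureResolutionY = 1024;

    RenderTexture cameraOutputTexture;

    void Start ()
    {
        // Step 1: the secondary camera renders into a RenderTexture instead of the screen.
        cameraOutputTexture = new RenderTexture (textureResolutionX, textureResolutionY, 0);
        cameraOutputTexture.Create ();
        renderTextureCamera.targetTexture = cameraOutputTexture;
    }

    void Update ()
    {
        // Step 3: keep the model's material pointing at the live camera output.
        paintedRenderer.material.SetTexture ("_MainTex", cameraOutputTexture);
    }

    void OnDestroy ()
    {
        // Release GPU memory so repeated use does not keep growing memory.
        if (cameraOutputTexture != null)
            cameraOutputTexture.Release ();
    }
}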

a. Creating the custom camera:

AR_Camera_Vector = GameObject.Find ("AR Camera Vector");
if (AR_Camera_Vector == null) {
    AR_Camera_Vector = new GameObject ("AR Camera Vector");
    Vector_Is_Created = true;
}

if (Vector_Is_Created) {
    Vector_Is_Created = false;

    // Parent the helper object under the AR camera and reset its transform.
    AR_Camera_Vector.transform.parent = ARCamera.transform;
    AR_Camera_Vector.transform.localPosition = Vector3.zero;
#if UNITY_EDITOR
    // In the editor, offset the helper slightly based on the image target size.
    AR_Camera_Vector.transform.localPosition = new Vector3 (0.0f, ImageTarget.GetComponent<ImageTargetBehaviour> ().GetSize ().x / 240.0f, 0.0f);
#endif
    AR_Camera_Vector.transform.localEulerAngles = new Vector3 (0.0f, 180.0f, 180.0f);
    AR_Camera_Vector.transform.localScale = new Vector3 (1.0f, 1.0f, 1.0f);
#if !UNITY_EDITOR
    // On device, compensate for portrait orientation using the camera pixel
    // width (CPW) and height (CPH).
    if (Screen.orientation == ScreenOrientation.Portrait || Screen.orientation == ScreenOrientation.PortraitUpsideDown)
        AR_Camera_Vector.transform.localScale = new Vector3 ((float)CPW / (float)CPH, (float)CPH / (float)CPW, 1.0f);
#endif
}
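
The snippet above only creates and orients the GameObject. Section b assumes a Camera component exists on it (referred to there as Render_Texture_Camera); if that component is added from code, a setup along the following lines might be used. The clear flags, field of view, and layer name here are assumptions, not taken from the original project.

// Hedged sketch: add and configure the Camera component on the helper object.
// The settings below (solid white background, the AR camera's field of view,
// a hypothetical "ColoringPage" layer) are assumptions.
Camera renderTextureCamera = AR_Camera_Vector.AddComponent<Camera> ();
renderTextureCamera.clearFlags = CameraClearFlags.SolidColor;
renderTextureCamera.backgroundColor = Color.white;
renderTextureCamera.fieldOfView = ARCamera.GetComponent<Camera> ().fieldOfView;
renderTextureCamera.cullingMask = LayerMask.GetMask ("ColoringPage");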

b. Creating a RenderTexture for the camera:

CameraOutputTexture = new RenderTexture (TextureResolutionX, TextureResolutionY, 0);
CameraOutputTexture.Create ();
Render_Texture_Camera.GetComponent<Camera> ().targetTexture = CameraOutputTexture;
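
Since the whole point of this approach is to avoid the memory growth described in the introduction, it is worth releasing the RenderTexture once it is no longer needed (for example when a new page is scanned at a different resolution, or the scene is unloaded). The original code does not show this, so the following is only a suggested cleanup:

// Suggested cleanup (not part of the original snippet): free the GPU memory
// backing the RenderTexture before replacing or discarding it.
if (CameraOutputTexture != null)
{
    Render_Texture_Camera.GetComponent<Camera> ().targetTexture = null;
    CameraOutputTexture.Release ();
    Object.Destroy (CameraOutputTexture);
    CameraOutputTexture = null;
}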

c. Checking that all four corner points are visible:

Vector3 boundPoint1 = GetComponent<Renderer> ().bounds.min;
Vector3 boundPoint2 = GetComponent<Renderer> ().bounds.max;
Vector3 boundPoint3 = new Vector3 (boundPoint1.x, boundPoint1.y, boundPoint2.z);
Vector3 boundPoint4 = new Vector3 (boundPoint2.x, boundPoint1.y, boundPoint1.z);

Vector3 screenPos1 = Child_AR_Camera.WorldToScreenPoint (boundPoint1);
Vector3 screenPos2 = Child_AR_Camera.WorldToScreenPoint (boundPoint2);
Vector3 screenPos3 = Child_AR_Camera.WorldToScreenPoint (boundPoint3);
Vector3 screenPos4 = Child_AR_Camera.WorldToScreenPoint (boundPoint4);

// If any corner falls outside the screen rectangle (0..CPW, 0..CPH),
// the marker is treated as out of bounds.
if (screenPos1.x < 0 || screenPos1.y < 0 ||
    screenPos2.x < 0 || screenPos2.y < 0 ||
    screenPos3.x < 0 || screenPos3.y < 0 ||
    screenPos4.x < 0 || screenPos4.y < 0 ||
    screenPos1.x > CPW || screenPos1.y > CPH ||
    screenPos2.x > CPW || screenPos2.y > CPH ||
    screenPos3.x > CPW || screenPos3.y > CPH ||
    screenPos4.x > CPW || screenPos4.y > CPH) {
    if (!MarkerIsOUT) {
        StartCoroutine (Start_MarkerOutOfBounds ());
        MarkerIsOUT = true;
        MarkerIsIN = false;
    }
} else {
    if (!MarkerIsIN) {
        StartCoroutine (Start_MarkerIsReturned ());
        MarkerIsIN = true;
    }
    MarkerIsOUT = false;
}
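
The bodies of Start_MarkerOutOfBounds and Start_MarkerIsReturned are not shown in the original. Assuming they simply toggle a hint telling the user to bring the page back into view, a plausible sketch is the following (OutOfBoundsHint is a hypothetical UI object, and System.Collections is needed for IEnumerator):

// Hypothetical coroutine bodies; the real project may do something different.
public GameObject OutOfBoundsHint;   // assumed UI hint, not from the original

IEnumerator Start_MarkerOutOfBounds ()
{
    // Small grace period so brief tracking glitches do not flash the hint.
    yield return new WaitForSeconds (0.5f);
    if (MarkerIsOUT)
        OutOfBoundsHint.SetActive (true);
}

IEnumerator Start_MarkerIsReturned ()
{
    OutOfBoundsHint.SetActive (false);
    yield return null;
}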

d. Capturing the camera texture:

// M: model (object-to-world) matrix of the painted object.
Matrix4x4 M = transform.localToWorldMatrix;
// V: view (world-to-camera) matrix of the helper camera object.
Matrix4x4 V = AR_Camera_Vector.transform.worldToLocalMatrix;
// P: projection matrix supplied by Vuforia.
Matrix4x4 P = Vuforia.VuforiaUnity.GetProjectionGL (0, 0, 0);
// Pass the combined MVP matrix to the shader so it can compute UVs
// from the camera's current view of the page.
GetComponent<Renderer> ().material.SetMatrix ("_MATRIX_MVP", P * V * M);
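
The shader in section e also reads two scale factors, _KX and _KY, that remap the resulting clip-space coordinates (-1..1) into UV space. The original does not show where they are set; for an uncropped RenderTexture both would be 0.5, with adjusted values when the texture is cropped, so a plausible assignment (an assumption, not from the original code) is:

// Assumed assignment of the shader's screen-to-UV scale factors.
// 0.5 maps the full clip-space range (-1..1) onto UVs (0..1); a cropped
// RenderTexture would use correspondingly adjusted values.
Material mat = GetComponent<Renderer> ().material;
mat.SetFloat ("_KX", 0.5f);
mat.SetFloat ("_KY", 0.5f);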

e. Screen coordinate conversion (the shader):

Shader "Custom/Set_Matrix_To_Material" {    Properties {        _MainTex("Texture", 2D) = "white" {}        _BackTex("Texture", 2D) = "white" {}        _Alpha ("AlphaFront", Range(0,1)) = 1        _AlphaBack ("AlphaBack", Range(0,1)) = 0    }    SubShader{      Tags {"Queue"="Transparent" "IgnoreProjector"="True" "RenderType"="Transparent"}            Pass{            CGPROGRAM            #pragma vertex vert            #pragma fragment frag            #include "UnityCG.cginc"            struct appdata            {                float4 vertex : POSITION;                float2 uv : TEXCOORD0;            };            fixed _AlphaBack;            struct v2f            {                float2 uv : TEXCOORD0;                float4 vertex : SV_POSITION;            };            v2f vert (appdata v)            {                v2f o;                o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);                o.uv = v.uv;                return o;            }            sampler2D _BackTex;            fixed4 frag (v2f i) : SV_Target            {                fixed4 col = tex2D(_BackTex, i.uv);                col.a = _AlphaBack;                return col;            }            ENDCG        }    Pass{    Cull Back    ZWrite Off    Blend SrcAlpha OneMinusSrcAlpha    CGPROGRAM    #pragma vertex vert    #pragma fragment frag    #include "UnityCG.cginc"    sampler2D _MainTex;    float4x4 _MATRIX_MVP;    float _KX;    float _KY;   int _KR = 1;   int _KG = 1;    fixed _Alpha;    struct v2f{        float4  pos : SV_POSITION;        float2  uv : TEXCOORD0;    };    v2f vert(appdata_base v){        v2f o;        float2 screenSpacePos;        float4 clipPos;        //Convert position from world space to clip space.        //Only the UV coordinates should be frozen, so use a different matrix        clipPos = mul(_MATRIX_MVP, v.vertex);        //Convert position from clip space to screen space.        //Screen space has range x=-1 to x=1        screenSpacePos.x = clipPos.x / clipPos.w;        screenSpacePos.y = clipPos.y / clipPos.w;        //the screen space range (-1 to 1) has to be converted to        //the UV range 0 to 1         o.uv.x = (_KX*screenSpacePos.x) + _KX;        o.uv.y = (_KY*screenSpacePos.y) + _KY;        //The position of the vertex should not be frozen, so use         //the standard UNITY_MATRIX_MVP matrix        o.pos = mul(UNITY_MATRIX_MVP, v.vertex);        return o;    }    half4 frag(v2f i) : COLOR{        fixed4 col = tex2D(_MainTex, i.uv);             if (i.uv.x < 0 || i.uv.x > _KX*2 || i.uv.y < 0 || i.uv.y > _KY*2) {col.rgb = 1; col.a = 0;}        else col.a = _Alpha;        col.r *= _KR + 1;        col.g *= _KG + 1;        return col;    }    ENDCG        }    }}

f. Assigning the texture to the material:

if (RenderTextureCamera.targetTexture)
    GetComponent<Renderer> ().materials [materialIndex].SetTexture ("_MainTex", RenderTextureCamera.targetTexture);
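
When the user wants to keep the painted result (for instance after lifting the camera away from the page), the live RenderTexture can be copied into a regular Texture2D so the model stops updating. This step is not in the original code, so treat it as a suggestion:

// Suggested "freeze" step (not shown in the original): copy the current
// RenderTexture into a Texture2D so the painted look is preserved.
RenderTexture rt = RenderTextureCamera.targetTexture;
Texture2D frozen = new Texture2D (rt.width, rt.height, TextureFormat.RGB24, false);
RenderTexture.active = rt;
frozen.ReadPixels (new Rect (0, 0, rt.width, rt.height), 0, 0);
frozen.Apply ();
RenderTexture.active = null;
GetComponent<Renderer> ().materials [materialIndex].SetTexture ("_MainTex", frozen);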

Demo download:
Link: https://pan.baidu.com/s/1hsPrXAk  Password: ls9f

This is a simplified version of the tutorial. I have already developed several AR products; feel free to contact me with any questions: 11413947
