unity3D 代码实现自定义平面
来源:互联网 发布:哥贝克力 知乎 编辑:程序博客网 时间:2024/05/20 14:19
看了LowPoly风格的图片之后被里面的海水效果深深的吸引住了,经过一番思考和借鉴后做出了这样的效果
为了自定义平面的长宽,我们需要设置顶点位置和设置三角形绘制顺序。
设置顶点位置
顶点的设置就按照顺序将顶点排列成为一个矩阵,就像军训的方队一样,可以横着排,竖着排,排布成为一个平面就可以啦。主要讲一下三角形的绘制。
三角形的绘制
一个矩形平面由两个三角形组成,像这样 →
在unity中逆时针绘制三角形为正面,背面是被剔除了的,不可见的,所以我们需要看那面就按照顺时针设置顶点顺序。
那么如果我们要设置上面图片中的矩形正对着我们的为正面的话就应该将顶点顺序设置为:
0,2,3,3,1,0
下面是实现过程:
创建一个空物体,为它添加三个组件
1.新建的C#脚本
2.Mesh Filter
3.Mesh Renderer
脚本代码:
// Builds a 1x1 quad mesh (two triangles) and assigns it to the MeshFilter
// on this GameObject. Requires MeshFilter + MeshRenderer components.
public class MeshTest : MonoBehaviour
{
    private Mesh mesh;

    // Returns the four corner vertices of a unit quad in the XY plane.
    // Convention: first vertex at the origin, ordered left-to-right,
    // bottom-to-top.
    Vector3[] setVertex()
    {
        Vector3[] vertexs = new Vector3[4];
        vertexs[0] = new Vector3(0, 0, 0);
        vertexs[1] = new Vector3(1, 0, 0);
        vertexs[2] = new Vector3(0, 1, 0);
        vertexs[3] = new Vector3(1, 1, 0);
        return vertexs;
    }

    // Returns the index buffer: two triangles (6 indices). Unity renders
    // clockwise winding as the front face and culls the back, so this
    // order makes the quad face the default camera.
    int[] setTriangle()
    {
        int[] triangles = new int[6];
        // First triangle: the indices refer to positions in the vertex array.
        triangles[0] = 0;
        triangles[1] = 2;
        triangles[2] = 3;
        // Second triangle.
        triangles[3] = 3;
        triangles[4] = 1;
        triangles[5] = 0;
        return triangles;
    }

    // Use this for initialization
    void Start()
    {
        mesh = GetComponent<MeshFilter>().mesh;
        mesh.vertices = setVertex();
        mesh.triangles = setTriangle();
        // FIX: the original never computed normals or bounds. Without
        // normals the quad renders black under lit shaders; without fresh
        // bounds the renderer can be frustum-culled incorrectly.
        mesh.RecalculateNormals();
        mesh.RecalculateBounds();
    }
}
不出意外的话会看见这个:
哈哈,已经成功的绘制出来一个啦,那么一个巨大的平面就可以用更多的小的这样的矩形组成了,那么我自己想了一个笨拙的办法看图:
将顶点数组从第一个数开始向后遍历,为了便于理解将第一个三角形的顺序保存在变量a,b,c中,第二个三角形的保存在a2,b2,c2中。
那么第一个矩形的顺序为,a = 0,b = 4,c = 5,a2 = 5,b2 = 1,c2 = 0,
那么我们可以发现,a 就为正在遍历的顶点的位置0,b 就为 a的值加上整个大平面的宽(指的是顶点数量),c 就为b 加一,a2 在这里我设置和c相同的值5,b2 就为 a2 减去宽,c2 就回到了数组正在遍历的顶点。
当第一个矩形设置完成后,就又从第二个顶点开始做重复的设置,直到倒数第二排的倒数第二个顶点,因为四个顶点为一个矩形嘛。(这里要注意遍历顶点的时候都要跳过每一排的最后一个,原因嘛,嘿嘿,自己画一下图就知道了)
整个代码:
// Generates a rectangular water-plane mesh of wide x height vertices and
// assigns it to the MeshFilter and MeshCollider on this GameObject.
public class WaterMesh : MonoBehaviour
{
    public float scals;      // distance between adjacent vertices (vertex spacing)
    public int wide;         // vertex count along one edge (width)
    public int height;       // vertex count along the other edge (height)
    private Mesh mesh;       // generated mesh
    private MeshCollider meshCollider;

    // Lays out wide*height vertices in a grid on the XZ plane, starting at
    // the origin and extending toward -X / -Z (matching the original's
    // negative-direction loops).
    Vector3[] setVertex(int wide, int height)
    {
        Vector3[] vects = new Vector3[wide * height];
        int vectsCount = 0;
        for (int i = 0; i > -height; i--)
        {
            for (int j = 0; j > -wide; j--)
            {
                // FIX: the original scaled positions by `wide` (the vertex
                // COUNT), so the public `scals` spacing field was declared
                // but never used. Use `scals` as the spacing it documents.
                // Note: `scals` must be set to a non-zero value in the
                // Inspector, otherwise the plane collapses to a point.
                vects[vectsCount++] = new Vector3(j * scals, 0, i * scals);
            }
        }
        return vects;
    }

    // Builds the index buffer: one quad (two triangles, 6 indices) per grid
    // cell. Winding is chosen so the upward (+Y) face is the front face
    // (Unity culls counter-clockwise-from-the-viewer backfaces).
    int[] setTriangle(int wide, int height)
    {
        // One cell per (wide-1)*(height-1) quad, 6 indices each.
        int[] tris = new int[(wide - 1) * (height - 1) * 6];
        int trisCount = 0;
        // Iterate grid cells directly instead of the original single vertex
        // walk with count_x/count_y bookkeeping; the emitted index sequence
        // is identical (last column and last row are skipped implicitly).
        for (int row = 0; row < height - 1; row++)
        {
            for (int col = 0; col < wide - 1; col++)
            {
                int a = row * wide + col;   // first vertex of this cell
                // Triangle 1: a, a+wide, a+wide+1
                tris[trisCount++] = a;
                tris[trisCount++] = a + wide;
                tris[trisCount++] = a + wide + 1;
                // Triangle 2: a+wide+1, a+1, a
                tris[trisCount++] = a + wide + 1;
                tris[trisCount++] = a + 1;
                tris[trisCount++] = a;
            }
        }
        return tris;
    }

    void Awake()
    {
        mesh = GetComponent<MeshFilter>().mesh;
        meshCollider = GetComponent<MeshCollider>();
        mesh.vertices = setVertex(wide, height);
        mesh.triangles = setTriangle(wide, height);
        // Normals are required for lit shading; the original skipped this.
        mesh.RecalculateNormals();
        meshCollider.sharedMesh = mesh;
    }
    // Empty Start()/Update() removed: Unity invokes magic methods even when
    // empty, so deleting them is both cleaner and marginally cheaper.
}
要实现图1的效果还需要一个模拟波浪的脚本和LowPoly风格的shader,这两个就不是很理解啦,在这里发一下大神的代码去实现这个效果吧。
// Animates the vertices of an existing mesh to fake waves: each frame every
// vertex is displaced from its captured rest position by overlapping
// sine/cosine terms, then the MeshCollider is rebuilt from the result.
public class Waves : MonoBehaviour
{
    public float waveHeight = 0.1f;        // amplitude of the main sine wave
    public float speed = 1.0f;             // phase speed of the main wave
    public float waveLength = 1.0f;        // spatial frequency of the main wave
    public float waveLength_Other = 1.0f;  // scale of the horizontal (x/z) jitter
    public float randomHeight = 0.2f;      // amplitude of the per-vertex random term
    public float randomSpeed = 5.0f;       // speed of the per-vertex random term

    private Vector3[] baseHeight;   // rest positions captured once in Start
    private Vector3[] vertices;     // scratch buffer, reused every frame
    private Mesh mesh;
    private MeshCollider meshCollider;

    // FIX: the original also kept a List<float> perVertexRandoms that was
    // filled in Start but never read anywhere; that dead per-vertex work
    // (and the empty Awake) has been removed.

    void Start()
    {
        mesh = GetComponent<MeshFilter>().mesh;
        meshCollider = GetComponent<MeshCollider>();
        if (baseHeight == null)
        {
            baseHeight = mesh.vertices;
        }
    }

    void Update()
    {
        if (vertices == null)
        {
            vertices = new Vector3[baseHeight.Length];
        }
        for (int i = 0; i < vertices.Length; i++)
        {
            Vector3 vertex = baseHeight[i];
            // Re-seeding from the rest position makes Random.value a stable
            // pseudo-random value per vertex. NOTE(review): this clobbers the
            // global UnityEngine.Random state every frame — confirm nothing
            // else depends on Random during Update. (Random.seed is also
            // deprecated in newer Unity versions in favor of Random.InitState.)
            Random.seed = (int)(vertex.x * vertex.x + vertex.z * vertex.z);
            vertex.y += Mathf.Sin(Mathf.Cos(Random.value * Time.time));
            vertex.y += Mathf.Sin(Time.time * speed + baseHeight[i].x * waveLength + baseHeight[i].y * waveLength) * waveHeight;
            vertex.y += Mathf.Sin(Mathf.Cos(Random.value * 1.0f) * randomHeight * Mathf.Cos(Time.time * randomSpeed * Mathf.Sin(Random.value * 1.0f)));
            vertex.z += Mathf.Cos(Time.time + baseHeight[i].x * baseHeight[i].x + baseHeight[i].z * baseHeight[i].z) * waveLength_Other * Random.value;
            vertex.x += Mathf.Cos(Time.time + baseHeight[i].x * baseHeight[i].x + baseHeight[i].z * baseHeight[i].z) * waveLength_Other * Random.value;
            vertices[i] = vertex;
        }
        mesh.vertices = vertices;
        // Null-then-assign forces the collider to rebuild for the deformed
        // geometry. NOTE(review): rebuilding a MeshCollider every frame is
        // expensive — presumably acceptable for this demo; verify.
        meshCollider.sharedMesh = null;
        meshCollider.sharedMesh = mesh;
        mesh.RecalculateNormals();
    }
}
shader:创建一个材质球,再创建一个Unlit Shader,把下面的shader代码写进去,再把shader弄到材质球上,最后给我们创建的空物体使用这个材质球,运行就可以看到图一的效果了。
// Flat-shaded ("low poly") shader: a geometry stage averages each triangle's
// normals/UVs/lighting so every face gets one constant value. Two passes:
// ForwardBase (ambient + main light + per-vertex point lights + specular)
// and an additive ForwardAdd pass for extra per-pixel lights.
Shader "LowPoly/LowPoly"
{
    Properties
    {
        _Color("Color", Color) = (1,1,1,1)
        _MainTex("Albedo", 2D) = "white" {}
        _Frequency ("Distortion Frequency",Float) = 1
        _Magnitude ("Distortion Magnitude",Float) = 1
        _InvWaveLength ("Distortion Inverse Wave Length",Float) = 10
        _Speed ("Speed",Float) = 0.5
        _SpecColor ("SpecColor",Color) = (1,1,1,1)
        _Shininess ("Shininess",Float) = 1
    }
    SubShader
    {
        Tags{ "RenderType" = "Opaque" }
        Blend SrcAlpha OneMinusSrcAlpha

        // ---- Pass 1: ForwardBase — ambient, main directional light,
        // up to four per-vertex point lights, and a specular term. ----
        Pass
        {
            Tags { "LightMode" = "ForwardBase"}
            CGPROGRAM
            #include "UnityCG.cginc"
            #include "AutoLight.cginc"
            #include "Lighting.cginc"
            #pragma vertex vert
            #pragma geometry geom
            #pragma fragment frag
            #pragma multi_compile_fwdbase

            uniform float4 _Color;
            uniform sampler2D _MainTex;
            // Distortion parameters: declared for the (currently commented
            // out) vertex-offset wave in vert(); unused while that code is
            // disabled.
            float _Frequency;
            float _Magnitude;
            float _InvWaveLength;
            float _Speed;
            //fixed4 _SpecColor;
            float _Shininess;

            // Vertex -> geometry stage payload.
            struct v2g
            {
                float4 pos : SV_POSITION;
                float3 norm : NORMAL;
                float2 uv : TEXCOORD0;
                float3 vertex : TEXCOORD1;          // object-space position (for world-pos in geom)
                float3 vertexLighting : TEXCOORD2;  // accumulated per-vertex point lighting
            };

            // Geometry -> fragment stage payload.
            struct g2f
            {
                float4 pos : SV_POSITION;
                float3 norm : NORMAL;
                float2 uv : TEXCOORD0;
                float4 posWorld : TEXCOORD1;
                float3 vertexLighting : TEXCOORD2;
                LIGHTING_COORDS(3, 4)               // shadow/attenuation interpolators
            };

            v2g vert(appdata_full v)
            {
                v2g OUT;
                // NOTE(review): `offset` is written but never applied to the
                // position — leftover from the disabled distortion below.
                float4 offset;
                offset.xzw = float3(0,0,0);
                //offset.y = sin(_Frequency * _Time.y + v.vertex .x * _InvWaveLength + v.vertex.y * //_InvWaveLength + v.vertex.z * _InvWaveLength) * _Magnitude;
                //offset.y = _Time.y;
                OUT.pos = mul(UNITY_MATRIX_MVP, v.vertex);
                OUT.norm = v.normal;
                OUT.uv = v.texcoord;
                //change
                OUT.vertex = v.vertex;
                // Accumulate up to four non-important point lights per vertex
                // (standard Unity vertex-lighting pattern).
                float3 vertexLighting = float3(0, 0, 0);
                #ifdef VERTEXLIGHT_ON
                for (int index = 0; index < 4; index++)
                {
                    float3 normalDir = normalize(mul(float4(v.normal, 0.0), unity_WorldToObject).xyz);
                    float3 lightPosition = float3(unity_4LightPosX0[index], unity_4LightPosY0[index], unity_4LightPosZ0[index]);
                    float3 vertexToLightSource = lightPosition - mul(unity_ObjectToWorld, v.vertex);
                    float3 lightDir = normalize(vertexToLightSource);
                    float distanceSquared = dot(vertexToLightSource, vertexToLightSource);
                    // Unity's vertex-light attenuation model: 1/(1+k*d^2).
                    float attenuation = 1.0 / (1.0 + unity_4LightAtten0[index] * distanceSquared);
                    vertexLighting += attenuation * unity_LightColor[index].rgb * _Color.rgb * saturate(dot(normalDir, lightDir));
                }
                #endif
                OUT.vertexLighting = vertexLighting;
                return OUT;
            }

            // Flattens each triangle: all three emitted vertices share the
            // averaged normal/UV/lighting/world position, producing the
            // faceted low-poly look.
            [maxvertexcount(3)]
            void geom(triangle v2g IN[3], inout TriangleStream<g2f> triStream)
            {
                float3 v0 = IN[0].pos.xyz;
                float3 v1 = IN[1].pos.xyz;
                float3 v2 = IN[2].pos.xyz;
                g2f OUT;
                OUT.norm = normalize(IN[0].norm + IN[1].norm + IN[2].norm);
                OUT.uv = (IN[0].uv + IN[1].uv + IN[2].uv) / 3;
                OUT.vertexLighting = (IN[0].vertexLighting + IN[1].vertexLighting + IN[2].vertexLighting) / 3;
                OUT.posWorld = mul(unity_ObjectToWorld, (IN[0].vertex + IN[1].vertex + IN[2].vertex) / 3);
                // Only the clip-space position differs per emitted vertex.
                OUT.pos = IN[0].pos;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
                OUT.pos = IN[1].pos;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
                OUT.pos = IN[2].pos;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
            }

            half4 frag(g2f IN) : COLOR
            {
                float3 normalDir = normalize(mul(float4(IN.norm, 0.0), unity_WorldToObject).xyz);
                // Directional main light: xyz is the light direction.
                float3 lightDir = normalize(_WorldSpaceLightPos0.xyz);
                float3 ambientLight = UNITY_LIGHTMODEL_AMBIENT.rgb * _Color.rgb;
                UNITY_LIGHT_ATTENUATION(atten, IN, IN.posWorld);
                float3 diffuseReflection = atten * _LightColor0.rgb * _Color.rgb * saturate(dot(normalDir, lightDir));
                //mirror
                // Blinn-Phong-style specular highlight.
                float3 viewDirection = normalize(UnityWorldSpaceViewDir(IN.posWorld));
                float3 specularReflection=_LightColor0.rgb*_SpecColor.rgb*pow(max(0.0,dot(reflect(-lightDir, normalDir),viewDirection)),_Shininess);
                float4 colorTex = tex2D(_MainTex, IN.uv);
                return float4((IN.vertexLighting + ambientLight + diffuseReflection + specularReflection) * colorTex, 1);
            }
            ENDCG
        }

        // ---- Pass 2: ForwardAdd — additive diffuse contribution for each
        // extra per-pixel light (Blend One One, no depth write). ----
        Pass
        {
            Tags{ "LightMode" = "ForwardAdd" }
            Blend One One
            ZWrite Off
            CGPROGRAM
            #pragma vertex vert
            #pragma geometry geom
            #pragma fragment frag
            #pragma multi_compile_fwdadd_fullshadows
            #include "UnityCG.cginc"
            #include "AutoLight.cginc"
            #include "Lighting.cginc"

            uniform float4 _Color;
            uniform sampler2D _MainTex;

            struct v2g
            {
                float4 pos : SV_POSITION;
                float3 norm : NORMAL;
                float3 vertex : TEXCOORD0;
                float3 uv : TEXCOORD1;
            };
            struct g2f
            {
                float4 pos : SV_POSITION;
                float3 norm : NORMAL;
                float3 posWorld : TEXCOORD0;
                float3 uv : TEXCOORD1;
                LIGHTING_COORDS(3, 4)
            };
            // hack because TRANSFER_VERTEX_TO_FRAGMENT has harcoded requirement for 'v.vertex'
            struct unityTransferVertexToFragmentSucksHack
            {
                float3 vertex : POSITION;
            };

            v2g vert(appdata_full v)
            {
                v2g OUT;
                OUT.pos = mul(UNITY_MATRIX_MVP, v.vertex);
                OUT.norm = v.normal;
                OUT.vertex = v.vertex;
                OUT.uv = v.texcoord;
                return OUT;
            }

            // Same flattening as pass 1; the local `v` exists only so the
            // TRANSFER_VERTEX_TO_FRAGMENT macro can reference `v.vertex`.
            [maxvertexcount(3)]
            void geom(triangle v2g IN[3], inout TriangleStream<g2f> triStream)
            {
                float3 v0 = IN[0].pos.xyz;
                float3 v1 = IN[1].pos.xyz;
                float3 v2 = IN[2].pos.xyz;
                g2f OUT;
                OUT.norm = normalize(IN[0].norm + IN[1].norm + IN[2].norm);
                OUT.uv = (IN[0].uv + IN[1].uv + IN[2].uv) / 3;
                OUT.posWorld = mul(unity_ObjectToWorld, (IN[0].vertex + IN[1].vertex + IN[2].vertex) / 3);
                unityTransferVertexToFragmentSucksHack v;
                OUT.pos = IN[0].pos;
                v.vertex = IN[0].vertex;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
                OUT.pos = IN[1].pos;
                v.vertex = IN[1].vertex;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
                OUT.pos = IN[2].pos;
                v.vertex = IN[2].vertex;
                TRANSFER_VERTEX_TO_FRAGMENT(OUT);
                triStream.Append(OUT);
            }

            float4 frag(g2f IN) : COLOR
            {
                float3 normalDir = normalize(mul(float4(IN.norm, 0.0), unity_WorldToObject).xyz);
                // w == 0 means directional light (xyz is a direction);
                // otherwise xyz is a position, so compute the vector to it.
                float3 vertexToLight = _WorldSpaceLightPos0.w == 0 ? _WorldSpaceLightPos0.xyz : _WorldSpaceLightPos0.xyz - IN.posWorld.xyz;
                float3 lightDir = normalize(vertexToLight);
                UNITY_LIGHT_ATTENUATION(atten, IN, IN.posWorld);
                float3 diffuseReflection = atten * _LightColor0.rgb * _Color.rgb * saturate(dot(normalDir, lightDir));
                float4 colorTex = tex2D(_MainTex, IN.uv);
                return float4(diffuseReflection * colorTex, 1);
            }
            ENDCG
        }
    }
    Fallback "Standard"
}
- unity3D 代码实现自定义平面
- Unity3D 游戏引擎之实现平面多点触摸(二)
- Unity3D 游戏引擎之实现平面多点触摸
- Unity3D 游戏引擎之实现平面多点触摸(二)
- Unity3D研究院之IOS实现平面多点触摸(二)
- Unity3D 游戏引擎之实现平面多点触摸(二)
- Unity3D 游戏引擎之实现平面多点触摸
- Unity3D 自定义光照模型实现
- Unity3D-代码实现GameObject创建
- BaseAdapter自定义实现代码
- 代码实现自定义布局
- Unity3D For iPhone游戏引擎之实现平面多点触摸(二)
- Unity3D 调用GPS位置服务实现代码
- unity3d 小地图的实现 脚本代码
- Unity3D 关于模型变形技术代码实现
- iOS 代码实现自定义TableView 自定义TableViewCell
- Unity3D之平面小球重力感应详解
- unity3d教程动态创建简单平面地形
- 用实例代码理解一下c++11”定义析构函数阻止合成移动”
- 幻境.最后一天
- 1、全志A33烧录固件(TF卡刷机)
- 我为什么要写博客
- Canvas 与 SVG 的比较
- unity3D 代码实现自定义平面
- Java
- 工厂模式区别
- git 删除本地追踪远程仓库但远程仓库已经不存在的分支
- 基于pyspider的大众点评数据爬取总结
- 网络图片加载
- maven gatling 运行
- Kafka集群动态修改复制因子以及数据留存时间
- oracle密码重置