This article shares the full code for implementing a circular Image component in Unity, for your reference. The details are as follows.
1. Introduction
Many images in games are displayed as circles, such as avatars and skill icons. The usual approach is an Image component combined with a Mask component, but the Mask component hurts performance (it adds extra draw calls), so using it in large numbers is not recommended.
How UGUI's Mask works: it uses the GPU's stencil buffer.
The Mask component assigns a special material to the parent and its child UI elements. This material marks every pixel of the Image and stores the marks in a buffer called the Stencil Buffer; each pixel covered by the parent is marked 1. When a child UI element is rendered, it checks whether the mark in the Stencil Buffer is 1: if so, the pixel is rendered, otherwise it is discarded.
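For comparison, here is a minimal sketch of the conventional Image + Mask setup mentioned above. The script name MaskedAvatarExample and the fields circleSprite and avatarSprite are hypothetical names used only for this example, and the script is assumed to sit on a GameObject under a Canvas:
using UnityEngine;
using UnityEngine.UI;

public class MaskedAvatarExample : MonoBehaviour // hypothetical example script
{
    public Sprite circleSprite; // a plain white circle used as the mask shape
    public Sprite avatarSprite; // the image to be clipped

    void Start()
    {
        // Parent: circle-shaped Image + Mask (this pair is what costs the extra draw calls).
        var maskRoot = new GameObject("MaskRoot", typeof(RectTransform), typeof(Image), typeof(Mask));
        maskRoot.transform.SetParent(transform, false);
        maskRoot.GetComponent<RectTransform>().sizeDelta = new Vector2(100, 100);
        maskRoot.GetComponent<Image>().sprite = circleSprite;
        maskRoot.GetComponent<Mask>().showMaskGraphic = false;

        // Child: the avatar itself; only pixels inside the circle pass the stencil test.
        var avatar = new GameObject("Avatar", typeof(RectTransform), typeof(Image));
        avatar.transform.SetParent(maskRoot.transform, false);
        avatar.GetComponent<RectTransform>().sizeDelta = new Vector2(100, 100);
        avatar.GetComponent<Image>().sprite = avatarSprite;
    }
}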
2. Implementing our own circular component
Components such as Image and RawImage inherit from the MaskableGraphic class, which in turn inherits from Graphic. Graphic has an OnPopulateMesh method that builds the geometry to be drawn. UGUI's Image component works by overriding OnPopulateMesh and drawing a rectangle, so following the same idea we can override OnPopulateMesh and draw a circle directly.
- Get the sprite's width, height, UVs and other information.
- OnPopulateMesh: OnPopulateMesh(VertexHelper vh) is called whenever a UI element regenerates its vertex data. We only need to clear the original rectangle vertices and write circle vertices instead, and the rendered result is naturally a circular image.
- Hit-area detection for irregular UI elements.
A UI component's hit area is determined by implementing IsRaycastLocationValid from the ICanvasRaycastFilter interface. It returns a bool; returning true means the point is accepted. The Image component, for example, checks two conditions: whether the screen point lies inside the image's rectangle, and whether the image's alpha at that point is greater than the alphaHitTestMinimumThreshold parameter.
For precise click detection we could set alphaHitTestMinimumThreshold to 0.1 in code, so that only pixels with alpha greater than 0.1 respond (a minimal sketch follows). However, this requires the texture's Read/Write Enabled option to be turned on, which keeps two copies of the texture in memory, so it is not recommended.
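A minimal sketch of that alpha-threshold approach, using Unity's Image.alphaHitTestMinimumThreshold property (the script name AlphaHitTestSetup is just an example, and the sprite's texture must have Read/Write Enabled checked in its import settings):
using UnityEngine;
using UnityEngine.UI;

[RequireComponent(typeof(Image))]
public class AlphaHitTestSetup : MonoBehaviour // hypothetical helper name
{
    void Awake()
    {
        // Only pixels whose alpha is greater than 0.1 will respond to raycasts/clicks.
        GetComponent<Image>().alphaHitTestMinimumThreshold = 0.1f;
    }
}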
For pixel-level click detection there is an algorithm that does the job: the Ray-Crossing algorithm.
It works for any shape. The idea is to cast a ray from the point in question in a fixed direction (here, a horizontal ray) and count its intersections with the shape's outline: an odd number of intersections means the point is inside, an even number means it is outside.
using UnityEngine;
using UnityEngine.Sprites;
using UnityEngine.UI;
using System.Collections.Generic;
/// <summary>
/// Circular Image component
/// </summary>
[AddComponentMenu("LFramework/UI/CircleImage", 11)]
public class CircleImage : MaskableGraphic, ICanvasRaycastFilter
{
/// <summary>
/// Render type
/// </summary>
public enum RenderType
{
Simple,
Filled,
}
/// <summary>
/// Fill type
/// </summary>
public enum FilledType
{
Radial360,
}
/// <summary>
/// Fill origin (for the Radial360 fill type)
/// </summary>
public enum Origin360
{
Right,
Top,
Left,
Bottom,
}
// Source sprite
[SerializeField]
Sprite m_Sprite;
public Sprite Sprite
{
get { return m_Sprite; }
}
// Texture used for rendering (falls back to the material's texture or a white texture)
public override Texture mainTexture
{
get
{
if (m_Sprite == null)
{
if (material != null && material.mainTexture != null)
{
return material.mainTexture;
}
return s_WhiteTexture;
}
return m_Sprite.texture;
}
}
// Render type
[SerializeField]
RenderType m_RenderType;
// Fill type
[SerializeField]
FilledType m_FilledType;
// Fill origin (Radial360 fill type)
[SerializeField]
Origin360 m_Origin360;
// Whether to fill clockwise
[SerializeField]
bool m_Clockwise;
// Fill amount (0 to 1)
[SerializeField]
[Range(0, 1)]
float m_FillAmount;
// Number of triangle segments used to approximate the circle
[SerializeField]
int segements = 100;
List<Vector3> vertexCache = new List<Vector3>();
protected override void OnPopulateMesh(VertexHelper vh)
{
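// Rebuild the mesh from scratch; vertexCache keeps the rim positions for later hit testing.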
vh.Clear();
vertexCache.Clear();
switch (m_RenderType)
{
case RenderType.Simple:
GenerateSimpleSprite(vh);
break;
case RenderType.Filled:
GenerateFilledSprite(vh);
break;
}
}
void GenerateSimpleSprite(VertexHelper vh)
{
Vector4 uv = m_Sprite == null
? Vector4.zero
: DataUtility.GetOuterUV(m_Sprite);
float uvWidth = uv.z - uv.x;
float uvHeight = uv.w - uv.y;
float width = rectTransform.rect.width;
float height = rectTransform.rect.height;
float dia = width > height ? width : height;
float r = dia * 0.5f;
Vector2 uvCenter = new Vector2((uv.x + uv.z) * 0.5f, (uv.y + uv.w) * 0.5f);
Vector3 posCenter = new Vector2((0.5f - rectTransform.pivot.x) * width, (0.5f - rectTransform.pivot.y) * height);
float uvScaleX = uvWidth / width;
float uvScaleY = uvHeight / height;
float deltaRad = 2 * Mathf.PI / segements;
float curRad = 0;
int vertexCount = segements + 1;
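// One center vertex plus one rim vertex per segment; rim positions are cached for Ray-Crossing hit testing.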
vh.AddVert(posCenter, color, uvCenter);
for (int i = 0; i < vertexCount - 1; i++)
{
UIVertex vertex = new UIVertex();
Vector3 posOffset = new Vector3(r * Mathf.Cos(curRad), r * Mathf.Sin(curRad));
vertex.position = posCenter + posOffset;
vertex.color = color;
vertex.uv0 = new Vector2(uvCenter.x + posOffset.x * uvScaleX, uvCenter.y + posOffset.y * uvScaleY);
vh.AddVert(vertex);
vertexCache.Add(vertex.position);
curRad += deltaRad;
}
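// Fan triangulation: every triangle shares the center vertex (index 0); the final AddTriangle closes the ring.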
for (int i = 0; i < vertexCount - 2; i++)
{
vh.AddTriangle(0, i + 1, i + 2);
}
vh.AddTriangle(0, segements, 1);
}
void GenerateFilledSprite(VertexHelper vh)
{
Vector4 uv = m_Sprite == null
? Vector4.zero
: DataUtility.GetOuterUV(m_Sprite);
float uvWidth = uv.z - uv.x;
float uvHeight = uv.w - uv.y;
float width = rectTransform.rect.width;
float height = rectTransform.rect.height;
float dia = width > height ? width : height;
float r = dia * 0.5f;
Vector2 uvCenter = new Vector2((uv.x + uv.z) * 0.5f, (uv.y + uv.w) * 0.5f);
Vector3 posCenter = new Vector2((0.5f - rectTransform.pivot.x) * width, (0.5f - rectTransform.pivot.y) * height);
float uvScaleX = uvWidth / width;
float uvScaleY = uvHeight / height;
float deltaRad = 2 * Mathf.PI / segements;
switch (m_FilledType)
{
case FilledType.Radial360:
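// The starting angle depends on the chosen origin; vertexCount is the center vertex plus the rim
// vertices of the arc (a partial fill needs one extra rim vertex to cap the arc).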
float quarterRad = 2 * Mathf.PI * 0.25f;
float curRad = quarterRad * (int)m_Origin360;
int vertexCount = m_FillAmount == 1
? segements + 1
: Mathf.RoundToInt(segements * m_FillAmount) + 2;
vh.AddVert(posCenter, color, uvCenter);
for (int i = 0; i < vertexCount - 1; i++)
{
UIVertex vertex = new UIVertex();
Vector3 posOffset = new Vector3(r * Mathf.Cos(curRad), r * Mathf.Sin(curRad));
vertex.position = posCenter + posOffset;
vertex.color = color;
vertex.uv0 = new Vector2(uvCenter.x + posOffset.x * uvScaleX, uvCenter.y + posOffset.y * uvScaleY);
vh.AddVert(vertex);
vertexCache.Add(vertex.position);
curRad += m_Clockwise ? -deltaRad : deltaRad;
}
for (int i = 0; i < vertexCount - 2; i++)
{
vh.AddTriangle(0, i + 1, i + 2);
}
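// When fully filled, one more triangle closes the gap between the last and first rim vertices.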
if (m_FillAmount == 1)
{
vh.AddTriangle(0, segements, 1);
}
break;
}
}
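// ICanvasRaycastFilter: a click is accepted only if the point lies inside the circle mesh (odd number of ray crossings).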
public bool IsRaycastLocationValid(Vector2 sp, Camera eventCamera)
{
Vector2 localPos;
int crossPointCount;
RectTransformUtility.ScreenPointToLocalPointInRectangle(rectTransform, sp, eventCamera, out localPos);
RayCrossing(localPos, out crossPointCount);
return crossPointCount % 2 != 0;
}
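// Ray-Crossing: count how many polygon edges a horizontal ray cast to the right of localPos crosses.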
public void RayCrossing(Vector2 localPos, out int crossPointCount)
{
crossPointCount = 0;
for (int i = 0; i < vertexCache.Count; i++)
{
Vector3 p1 = vertexCache[i];
Vector3 p2 = vertexCache[(i + 1) % vertexCache.Count];
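// Skip edges a horizontal ray through localPos can never cross: horizontal edges,
// and edges whose Y range does not straddle the point.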
if (p1.y == p2.y) continue;
if (localPos.y <= Mathf.Min(p1.y, p2.y)) continue;
if (localPos.y >= Mathf.Max(p1.y, p2.y)) continue;
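// X coordinate where this edge meets the ray's Y; count it only if it lies at or to the right of the point.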
float crossX = (localPos.y - p1.y) * (p2.x - p1.x) / (p2.y - p1.y) + p1.x;
if (crossX >= localPos.x)
{
crossPointCount++;
}
}
}
}
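A short usage sketch under an assumed setup (not part of the original code): place CircleImage on the same GameObject as a Button under a Canvas. The GraphicRaycaster consults CircleImage.IsRaycastLocationValid, so only clicks inside the circle trigger the Button. The script name CircleButtonDemo is hypothetical:
using UnityEngine;
using UnityEngine.UI;

[RequireComponent(typeof(CircleImage), typeof(Button))]
public class CircleButtonDemo : MonoBehaviour // hypothetical demo script
{
    void Awake()
    {
        // Clicks landing outside the circle mesh are rejected by IsRaycastLocationValid,
        // so this callback only fires for clicks inside the circle.
        GetComponent<Button>().onClick.AddListener(() => Debug.Log("Circle clicked"));
    }
}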