Contents
Creating a C# script
Creating GUI components yourself:
Getting-started tutorial:
Reading images:
Unity: read an image and display it in the UI
Displaying a stereo (dual) camera; confirmed working
Unity3D: displaying images
Reference tutorial: GameObject
Introduction to and use of the Unity UGUI Image component - Jianshu
Creating a C# script
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;

public class LoadImageOnClick : MonoBehaviour
{
    public Image imageComponent;
    public Button button;

    // Start is called before the first frame update
    void Start()
    {
        button = GetComponent<Button>();
        imageComponent = GetComponent<Image>();

        // Check that the Button component was found before wiring up the click handler.
        if (button != null)
        {
            button.onClick.AddListener(LoadAndShowImage);
        }
        else
        {
            Debug.LogError("Button component not found!");
        }
    }

    void LoadAndShowImage()
    {
        FileStream fs = new FileStream(@"C:\Users\Administrator\Pictures\mm\pics\005953_4.jpg", FileMode.Open, FileAccess.Read);
        fs.Seek(0, SeekOrigin.Begin); // Resetting the cursor is optional here.
        byte[] bytes = new byte[fs.Length]; // Buffer that will hold the raw image bytes.
        try
        {
            // Read the file; wrap it in try/catch so a failed read does not throw unhandled.
            fs.Read(bytes, 0, bytes.Length);
        }
        catch (Exception e)
        {
            Debug.Log(e);
        }
        fs.Close(); // Remember to close the stream.

        int width = 800;  // Image width (both values could be promoted to method parameters).
        int height = 800; // Image height. Side note for Pico development: textures larger than 4K x 4K display
                          // incorrectly; a 6000x3600 image from the artists caused black screens after a few images.
        Texture2D texture = new Texture2D(width, height);
        if (texture.LoadImage(bytes))
        {
            print("Image loaded");
        }
        else
        {
            print("Image not loaded");
        }

        // Alternative: load from Resources instead.
        // Texture2D texture = Resources.Load<Texture2D>("YourImageName"); // Replace "YourImageName" with your asset name.

        // Check that the texture loaded successfully before creating the sprite.
        if (texture != null)
        {
            print("Showing the loaded image");
            imageComponent.sprite = Sprite.Create(texture, new Rect(0, 0, texture.width, texture.height), Vector2.zero);
            print("Done");
        }
        else
        {
            print("Failed to load image");
            Debug.LogError("Failed to load image!");
        }
    }

    // Update is called once per frame
    void Update()
    {
    }
}
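For reference, here is a minimal sketch (my own, not from the script above) of the same idea using UnityWebRequestTexture, which lets Unity decode the file and size the texture automatically instead of reading it through a raw FileStream. The file path is a placeholder, and request.result requires Unity 2020.2 or newer.

using System.Collections;
using UnityEngine;
using UnityEngine.Networking;
using UnityEngine.UI;

public class LoadImageWithWebRequest : MonoBehaviour
{
    public Image imageComponent; // Assign in the Inspector.

    // Hypothetical local path; replace with a real file on your machine.
    private const string FileUrl = "file://C:/temp/example.jpg";

    IEnumerator Start()
    {
        using (UnityWebRequest request = UnityWebRequestTexture.GetTexture(FileUrl))
        {
            yield return request.SendWebRequest();

            if (request.result != UnityWebRequest.Result.Success)
            {
                Debug.LogError(request.error);
                yield break;
            }

            // The decoded texture already has the correct width and height,
            // so there is no need to guess dimensions as in the FileStream version.
            Texture2D texture = DownloadHandlerTexture.GetContent(request);
            imageComponent.sprite = Sprite.Create(
                texture, new Rect(0, 0, texture.width, texture.height), Vector2.zero);
        }
    }
}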
Creating GUI components yourself:
Steps:
- Create an empty GameObject and attach this script to it.
- Add a Canvas object to the scene and set the Canvas's Render Mode to Screen Space - Overlay.
- Create an Image object under the Canvas and drag the Image component onto the script's image field.
- Drag the image asset you want to display onto the script's sprite field.
- Run the game; the image will be displayed in the scene.
Another option: drag the script onto the Canvas itself,
then assign the Image object to the script's image field in the Inspector. A minimal sketch of the kind of script these steps assume is shown below.
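As referenced above, this is a sketch of such a script; the class name ShowSprite and the field names image and sprite are placeholders matching the steps, not taken from the linked tutorials.

using UnityEngine;
using UnityEngine.UI;

public class ShowSprite : MonoBehaviour
{
    public Image image;   // Drag the Image object under the Canvas here.
    public Sprite sprite; // Drag the image asset here.

    void Start()
    {
        // Assign the sprite once at startup so it shows as soon as the scene runs.
        image.sprite = sprite;
    }
}

Note that the image asset must be imported with Texture Type set to Sprite (2D and UI) for it to be assignable to the sprite field.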
Getting-started tutorial:
Unity: read images from the project folder (PC) - CSDN blog
Reading images:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.IO;
using UnityEngine.UI;

public class Choose : MonoBehaviour
{
    private GameObject canvas;
    private Button _btn;
    private GameObject button;
    private List<Texture2D> images = new List<Texture2D>();

    void Start()
    {
        canvas = GameObject.Find("Canvas/Scroll View/Viewport/Content");
        load();
        for (int i = 0; i < images.Count; i++)
        {
            // Create a GameObject with Button, RectTransform and Image components.
            button = new GameObject("Button" + i, typeof(Button), typeof(RectTransform), typeof(Image));
            button.transform.SetParent(this.canvas.transform, false); // Parent it under the Content object; false keeps the local UI layout values.
            _btn = button.GetComponent<Button>(); // Grab the Button component.
            // Wrap the loaded Texture2D in a Sprite so the Image component can display it.
            Sprite sprite = Sprite.Create(images[i], new Rect(0, 0, images[i].width, images[i].height), Vector2.zero);
            button.GetComponent<Image>().sprite = sprite;
            button.GetComponent<Button>().onClick.AddListener(ChooseButton);
        }
    }

    /// <summary>
    /// Load all images in the folder.
    /// </summary>
    void load()
    {
        List<string> filePaths = new List<string>();
        string imgtype = "*.BMP|*.JPG|*.GIF|*.PNG";
        string[] ImageType = imgtype.Split('|');
        for (int i = 0; i < ImageType.Length; i++)
        {
            // Collect all image paths of this type under the folder.
            string[] dirs = Directory.GetFiles((Application.dataPath + "/Resources/Screenshot/"), ImageType[i]);
            for (int j = 0; j < dirs.Length; j++)
            {
                filePaths.Add(dirs[j]);
            }
        }
        for (int i = 0; i < filePaths.Count; i++)
        {
            Texture2D tx = new Texture2D(100, 100);
            tx.LoadImage(getImageByte(filePaths[i]));
            images.Add(tx);
        }
    }

    /// <summary>
    /// Return the raw bytes of the image at the given path.
    /// </summary>
    /// <param name="imagePath">Path to the image file.</param>
    /// <returns>The file contents as a byte array.</returns>
    private static byte[] getImageByte(string imagePath)
    {
        FileStream files = new FileStream(imagePath, FileMode.Open);
        byte[] imgByte = new byte[files.Length];
        files.Read(imgByte, 0, imgByte.Length);
        files.Close();
        return imgByte;
    }

    public void ChooseButton()
    {
        // UGUI Button click handler.
        Debug.Log("Button pressed");
    }
}
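The ChooseButton handler above cannot tell which button was pressed. A minimal sketch, assuming the handler signature may be changed, of passing the button index through a lambda; copying the loop variable into a local avoids every closure capturing the same final value.

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

public class ChooseWithIndex : MonoBehaviour
{
    public List<Button> buttons; // Hypothetical: the generated buttons, in creation order.

    void Start()
    {
        for (int i = 0; i < buttons.Count; i++)
        {
            int index = i; // Copy the loop variable so each lambda captures its own value.
            buttons[index].onClick.AddListener(() => ChooseButton(index));
        }
    }

    void ChooseButton(int index)
    {
        Debug.Log("Button " + index + " pressed");
    }
}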
Unity: read an image and display it in the UI
Unity: read an image and display it in the UI - CSDN blog
Displaying a stereo (dual) camera; confirmed working
GitHub - Sliicy/VR-USB-Camera-Viewer: Unity Project that enables viewing 2 USB cameras in VR as a stereograph image.
Unity3D: displaying images
Displaying images (Sprite, Texture2D) in Unity3D, loaded internally or externally - Bilibili
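A minimal sketch contrasting the two loading routes the linked post distinguishes: an internal asset loaded with Resources.Load versus an external file read from disk. The asset name and file path are placeholders.

using System.IO;
using UnityEngine;
using UnityEngine.UI;

public class LoadInternalOrExternal : MonoBehaviour
{
    public Image target; // Assign in the Inspector.

    void Start()
    {
        // Internal: the asset must live under a Resources folder and be imported as a Sprite,
        // e.g. Assets/Resources/Pictures/photo.png (placeholder name).
        Sprite internalSprite = Resources.Load<Sprite>("Pictures/photo");
        if (internalSprite != null)
        {
            target.sprite = internalSprite;
            return;
        }

        // External: read raw bytes from disk and decode them into a Texture2D.
        string path = @"C:\temp\photo.jpg"; // Placeholder path.
        byte[] bytes = File.ReadAllBytes(path);
        Texture2D texture = new Texture2D(2, 2); // Size is replaced by LoadImage.
        if (texture.LoadImage(bytes))
        {
            target.sprite = Sprite.Create(texture, new Rect(0, 0, texture.width, texture.height), Vector2.zero);
        }
    }
}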
Covers both 2D and 3D; can be run against a video and confirmed working.
GitHub - creativeIKEP/BlazePoseBarracuda: BlazePoseBarracuda is a human 2D/3D pose estimation neural network that runs the Mediapipe Pose (BlazePose) pipeline on the Unity Barracuda with GPU.
Two face-related projects; need another look:
GitHub - creativeIKEP/HolisticMotionCapture: HolisticMotionCapture is an application and package that can capture the motion of a person with only a monocular color camera and move the VRM avatar's pose, face, and hands.
GitHub - creativeIKEP/HolisticBarracuda: HolisticBarracuda is the Unity Package that simultaneously estimates 33 pose, 21 per-hand, and 468 facial landmarks on the Unity Barracuda with GPU.
This one seems usable:
GitHub - natmlx/movenet-3d-unity: MoveNet 3D pose detection sample in Unity Engine.