// Copyright (c) 2021 homuler
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
//
// ATTENTION!: This code is for a tutorial.

using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using Unity.VisualScripting;
using UnityEngine;
using UnityEngine.UI;
using Mediapipe.Unity.CoordinateSystem;

namespace Mediapipe.Unity.Tutorial
{
  /// <summary>
  /// Runs a MediaPipe graph on the local webcam feed and draws/logs the pose
  /// and hand landmarks produced on its output streams.
  /// </summary>
  public class Wesign_extractor : MonoBehaviour
  {
    /// <summary>Graph config (pbtxt text) used to build the <see cref="CalculatorGraph"/>.</summary>
    [SerializeField] private TextAsset _configAsset;

    /// <summary>The screen object on which the webcam video is displayed.</summary>
    [SerializeField] private RawImage _screen;

    /// <summary>Width (pixels) requested from the webcam and applied to the screen.</summary>
    [SerializeField] private int _width;

    /// <summary>Height (pixels) requested from the webcam and applied to the screen.</summary>
    [SerializeField] private int _height;

    /// <summary>Frame rate requested from the webcam.</summary>
    [SerializeField] private int _fps;

    /// <summary>Annotation controller that draws the pose landmarks over the screen.</summary>
    [SerializeField] private PoseLandmarkListAnnotationController _poseLandmarkListAnnotationController;

    /// <summary>The MediaPipe graph.</summary>
    private CalculatorGraph _graph;

    /// <summary>Resource manager that resolves the model assets the graph needs.</summary>
    private ResourceManager _resourceManager;

    /// <summary>Webcam texture used as the video source.</summary>
    private WebCamTexture _webCamTexture;

    /// <summary>CPU-readable texture each webcam frame is copied into before being fed to the graph.</summary>
    private Texture2D _inputTexture;

    /// <summary>Reusable pixel buffer so GetPixels32 does not allocate every frame.</summary>
    private Color32[] _pixelData;

    /// <summary>Stopwatch used to derive a monotonically increasing timestamp for video frames.</summary>
    private Stopwatch _stopwatch;

    /// <summary>
    /// Sets up the webcam, prepares the graph's model assets, starts the graph,
    /// then feeds webcam frames to it indefinitely while polling the landmark
    /// output streams once per rendered frame.
    /// </summary>
    /// <returns>Coroutine iterator; runs for the lifetime of the component.</returns>
    /// <exception cref="System.Exception">Thrown when no webcam device is available.</exception>
    private IEnumerator Start()
    {
      // --- Webcam setup ---
      if (WebCamTexture.devices.Length == 0)
      {
        throw new System.Exception("Web Camera devices are not found");
      }
      var webCamDevice = WebCamTexture.devices[0];
      _webCamTexture = new WebCamTexture(webCamDevice.name, _width, _height, _fps);
      _webCamTexture.Play();

      // WebCamTexture reports a small placeholder size until the camera has
      // actually started delivering frames.
      yield return new WaitUntil(() => _webCamTexture.width > 16);

      _screen.rectTransform.sizeDelta = new Vector2(_width, _height);
      _screen.texture = _webCamTexture;

      _inputTexture = new Texture2D(_width, _height, TextureFormat.RGBA32, false);
      _pixelData = new Color32[_width * _height];

      // --- Model assets required by the graph ---
      _resourceManager = new StreamingAssetsResourceManager();
      yield return _resourceManager.PrepareAssetAsync("pose_detection.bytes");
      yield return _resourceManager.PrepareAssetAsync("pose_landmark_full.bytes");
      yield return _resourceManager.PrepareAssetAsync("face_landmark.bytes");
      yield return _resourceManager.PrepareAssetAsync("hand_landmark_full.bytes");
      yield return _resourceManager.PrepareAssetAsync("face_detection_short_range.bytes");
      yield return _resourceManager.PrepareAssetAsync("hand_recrop.bytes");
      yield return _resourceManager.PrepareAssetAsync("handedness.txt");

      _stopwatch = new Stopwatch();

      // --- Graph setup ---
      _graph = new CalculatorGraph(_configAsset.text);
      var poseStream = new OutputStream(_graph, "pose_landmarks");
      var leftHandStream = new OutputStream(_graph, "left_hand_landmarks");
      var rightHandStream = new OutputStream(_graph, "right_hand_landmarks");
      poseStream.StartPolling().AssertOk();
      leftHandStream.StartPolling().AssertOk();
      rightHandStream.StartPolling().AssertOk();
      _graph.StartRun().AssertOk();
      _stopwatch.Start();

      // Fix: hoisted out of the loop — yielding a cached instance avoids a
      // needless heap allocation on every rendered frame.
      var waitForEndOfFrame = new WaitForEndOfFrame();

      while (true)
      {
        // Copy the current webcam frame into the CPU-side texture, then wrap
        // the raw bytes as an ImageFrame packet for the graph.
        _inputTexture.SetPixels32(_webCamTexture.GetPixels32(_pixelData));
        var imageFrame = new ImageFrame(ImageFormat.Types.Format.Srgba, _width, _height, _width * 4, _inputTexture.GetRawTextureData());

        // ElapsedTicks is in 100 ns units; dividing by (TicksPerMillisecond / 1000)
        // converts it to microseconds — presumably the unit the graph's
        // Timestamp expects (TODO confirm against the plugin version in use).
        var currentTimestamp = _stopwatch.ElapsedTicks / (System.TimeSpan.TicksPerMillisecond / 1000);
        _graph.AddPacketToInputStream("input_video", new ImageFramePacket(imageFrame, new Timestamp(currentTimestamp))).AssertOk();

        yield return waitForEndOfFrame;

        if (poseStream.TryGetNext(out var poseLandmarks) && poseLandmarks != null)
        {
          // Draw the pose landmarks on the screen.
          _poseLandmarkListAnnotationController.DrawNow(poseLandmarks);
          var firstPose = poseLandmarks.Landmark[0];
          UnityEngine.Debug.Log($"Pose Coordinates: {firstPose}");
        }
        if (leftHandStream.TryGetNext(out var leftLandmarks) && leftLandmarks != null)
        {
          var firstLeft = leftLandmarks.Landmark[0];
          UnityEngine.Debug.Log($"Pose left Coordinates: {firstLeft}");
        }
        if (rightHandStream.TryGetNext(out var rightLandmarks) && rightLandmarks != null)
        {
          var firstRight = rightLandmarks.Landmark[0];
          UnityEngine.Debug.Log($"Pose right Coordinates: {firstRight}");
        }
      }
    }

    /// <summary>
    /// Proper destruction of the MediaPipe graph: stops the webcam, closes the
    /// graph's input stream, waits for it to finish, disposes it, and frees the
    /// input texture.
    /// </summary>
    private void OnDestroy()
    {
      if (_webCamTexture != null)
      {
        _webCamTexture.Stop();
      }

      if (_graph != null)
      {
        try
        {
          _graph.CloseInputStream("input_video").AssertOk();
          _graph.WaitUntilDone().AssertOk();
        }
        finally
        {
          _graph.Dispose();
        }
      }

      // Fix: _inputTexture was previously leaked. Texture2D wraps a native
      // resource and is not reclaimed by the GC, so it must be destroyed
      // explicitly.
      if (_inputTexture != null)
      {
        Destroy(_inputTexture);
      }
    }
  }
}