C# OpenCvSharp DNN Gaze Estimation

Table of Contents

Introduction

Results

Model Information

Project

Code

frmMain.cs

GazeEstimation.cs

Download


C# OpenCvSharp DNN Gaze Estimation

Introduction

Training source code: https://github.com/deepinsight/insightface/tree/master/reconstruction/gaze

Results

Model Information

Inputs
-------------------------
name: input
tensor: Float[1, 3, 160, 160]
---------------------------------------------------------------

Outputs
-------------------------
name: output
tensor: Float[1, 962, 3]
---------------------------------------------------------------
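
A minimal OpenCvSharp sketch for checking this input/output contract (assuming the same model file path the demo below uses) might look like this:

using System;
using OpenCvSharp;
using OpenCvSharp.Dnn;

// Sketch only: feed a dummy 160x160 image and confirm the 1 x 962 x 3 output shape.
Net net = CvDnn.ReadNetFromOnnx("model/generalizing_gaze_estimation_with_weak_supervision_from_synthetic_views_1x3x160x160.onnx");
Mat dummy = new Mat(160, 160, MatType.CV_32FC3, Scalar.All(0));  // stands in for a normalized RGB eye crop
Mat blob = CvDnn.BlobFromImage(dummy);                           // NCHW blob: 1 x 3 x 160 x 160
net.SetInput(blob);
Mat output = net.Forward();
Console.WriteLine($"{output.Size(0)} x {output.Size(1)} x {output.Size(2)}");  // expected: 1 x 962 x 3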

Project

Code

frmMain.cs

using OpenCvSharp;
using OpenCvSharp.Dnn;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Windows.Forms;

namespace OpenCvSharp_Yolov8_Demo
{
    public partial class frmMain : Form
    {
        public frmMain()
        {
            InitializeComponent();
        }

        string fileFilter = "*.*|*.bmp;*.jpg;*.jpeg;*.tif;*.tiff;*.png";
        string image_path = "";
        string startupPath;
        Mat image;
        Mat result_image;

        YOLOv8_face face_detector = new YOLOv8_face("model/yolov8n-face.onnx", 0.45f, 0.5f);
        GazeEstimation gaze_predictor = new GazeEstimation("model/generalizing_gaze_estimation_with_weak_supervision_from_synthetic_views_1x3x160x160.onnx");

        private void Form1_Load(object sender, EventArgs e)
        {

            image_path = "img_test/1.jpg";
            pictureBox1.Image = new Bitmap(image_path);
            image = new Mat(image_path);
        }

        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog ofd = new OpenFileDialog();
            ofd.Filter = fileFilter;
            if (ofd.ShowDialog() != DialogResult.OK) return;
            pictureBox1.Image = null;
            image_path = ofd.FileName;
            pictureBox1.Image = new Bitmap(image_path);
            textBox1.Text = "";
            pictureBox2.Image = null;
        }

        private void button2_Click(object sender, EventArgs e)
        {
            if (image_path == "")
            {
                return;
            }
            textBox1.Text = "";
            pictureBox2.Image = null;
            button2.Enabled = false;
            Application.DoEvents();

            image = new Mat(image_path);
            List<Face> ltFace = face_detector.Detect(new Mat(image_path));

            if (ltFace.Count > 0)
            {
                result_image = image.Clone();
                //face_detector.DrawPred(ltFace, result_image);
                String info = "";
                foreach (Face item in ltFace)
                {
                    gaze_predictor.Detect(image, item);
                    result_image = gaze_predictor.DrawOn(result_image, item, out info);
                }
                pictureBox2.Image = new Bitmap(result_image.ToMemoryStream());
                textBox1.Text = info;
            }
            else
            {
                textBox1.Text = "No face detected";
            }

            button2.Enabled = true;
        }

    }
}
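
The Face type returned by YOLOv8_face is not listed in this article. Judging from how it is used here and in GazeEstimation.cs (box.rect for the face rectangle, box.kpt[0] / box.kpt[1] for the left and right eye keypoints), a minimal hypothetical stand-in could look like this:

using OpenCvSharp;

// Hypothetical sketch only; the real class ships with the YOLOv8_face detector code.
public class Face
{
    public Rect rect;      // face bounding box in image coordinates
    public Point[] kpt;    // facial keypoints; kpt[0] = left eye, kpt[1] = right eye
    public float score;    // detection confidence (assumed field)
}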
 

GazeEstimation.cs

using OpenCvSharp;
using OpenCvSharp.Dnn;
using System;
using System.Linq;
using System.Text;

namespace OpenCvSharp_Yolov8_Demo
{
    unsafe public class GazeEstimation
    {

        float[] mean = new float[] { 0.5f, 0.5f, 0.5f };
        float[] std = new float[] { 0.5f, 0.5f, 0.5f };
        int[] iris_idx_481 = new int[] { 248, 252, 224, 228, 232, 236, 240, 244 };
        int num_eye = 481;
        int input_size = 160;
        float[] eye_kps;
        Net net;

        public GazeEstimation(string modelpath)
        {
            net = CvDnn.ReadNetFromOnnx(modelpath);
            eye_kps = new float[num_eye * 2 * 3];
        }

        public void Detect(Mat img, Face box)
        {

            Point kps_right_eye = box.kpt[1];
            Point kps_left_eye = box.kpt[0];
            float[] center = new float[] { (kps_right_eye.X + kps_left_eye.X) * 0.5f, (kps_right_eye.Y + kps_left_eye.Y) * 0.5f };
            float _size = (float)(Math.Max(box.rect.Width / 1.5f, Math.Abs(kps_right_eye.X - kps_left_eye.X)) * 1.5f);

            float _scale = input_size / _size;
            // transform: build a 2x3 affine matrix that scales the image and shifts the eye-region center to the center of the 160x160 crop
            float cx = center[0] * _scale;
            float cy = center[1] * _scale;

            float[] data = new float[] { _scale, 0, (float)(-cx + input_size * 0.5), 0, _scale, (float)(-cy + input_size * 0.5) };
            Mat M = new Mat(2, 3, MatType.CV_32F, data);

            Mat cropped = new Mat();
            Cv2.WarpAffine(img, cropped, M, new Size(input_size, input_size));

            Mat rgbimg = new Mat();
            Cv2.CvtColor(cropped, rgbimg, ColorConversionCodes.BGR2RGB);

            Mat normalized_mat = Normalize(rgbimg);

            Mat blob = CvDnn.BlobFromImage(normalized_mat);

            net.SetInput(blob);

            Mat[] outs = new Mat[3] { new Mat(), new Mat(), new Mat() };
            string[] outBlobNames = net.GetUnconnectedOutLayersNames().ToArray();

            net.Forward(outs, outBlobNames);

            float* opred = (float*)outs[0].Data;  // outs[0] has shape (1, 962, 3)
            Mat IM = new Mat();
            Cv2.InvertAffineTransform(M, IM);
            // trans_points: map the 962 predicted (x, y, z) keypoints from the 160x160 crop back to original image coordinates; z is rescaled by the inverse-transform scale
            float scale = (float)Math.Sqrt(IM.At<float>(0, 0) * IM.At<float>(0, 0) + IM.At<float>(0, 1) * IM.At<float>(0, 1));
            int row = outs[0].Size(1);
            int col = outs[0].Size(2);

            for (int i = 0; i < row; i++)
            {
                eye_kps[i * 3] = IM.At<float>(0, 0) * opred[i * 3] + IM.At<float>(0, 1) * opred[i * 3 + 1] + IM.At<float>(0, 2);
                eye_kps[i * 3 + 1] = IM.At<float>(1, 0) * opred[i * 3] + IM.At<float>(1, 1) * opred[i * 3 + 1] + IM.At<float>(1, 2);
                eye_kps[i * 3 + 2] = opred[i * 3 + 2] * scale;
            }

        }

        public Mat DrawOn(Mat srcimg, Face box, out string info)
        {
            StringBuilder sb = new StringBuilder();

            float rescale = 300.0f / box.rect.Width;
            Mat eimg = new Mat();
            Cv2.Resize(srcimg, eimg, new Size(), rescale, rescale);
            // draw_item: swap the first two coordinates of every keypoint and rescale them to the enlarged drawing image
            int row = num_eye * 2;
            for (int i = 0; i < row; i++)
            {
                float tmp = eye_kps[i * 3];
                eye_kps[i * 3] = eye_kps[i * 3 + 1] * rescale;
                eye_kps[i * 3 + 1] = tmp * rescale;
                eye_kps[i * 3 + 2] *= rescale;
            }
            //angles_and_vec_from_eye
            int slice = num_eye * 3;
            float[] theta_x_y_vec_l = new float[5];

            float[] eye_kps_l = new float[481 * 3];
            float[] eye_kps_r = new float[481 * 3];

            Array.Copy(eye_kps, 0, eye_kps_l, 0, 481 * 3);
            Array.Copy(eye_kps, 481 * 3, eye_kps_r, 0, 481 * 3);

            angles_and_vec_from_eye(eye_kps_l, iris_idx_481, theta_x_y_vec_l);
            float[] theta_x_y_vec_r = new float[5];

            angles_and_vec_from_eye(eye_kps_r, iris_idx_481, theta_x_y_vec_r);

            float[] gaze_pred = new float[] { (float)((theta_x_y_vec_l[0] + theta_x_y_vec_r[0]) * 0.5), (float)((theta_x_y_vec_l[1] + theta_x_y_vec_r[1]) * 0.5) };

            float diag = (float)Math.Sqrt((float)eimg.Rows * eimg.Cols);

            float[] eye_pos_left = new float[] { 0, 0 };
            float[] eye_pos_right = new float[] { 0, 0 };
            for (int i = 0; i < 8; i++)
            {
                int ind = iris_idx_481[i];
                eye_pos_left[0] += eye_kps[ind * 3];
                eye_pos_left[1] += eye_kps[ind * 3 + 1];
                eye_pos_right[0] += eye_kps[slice + ind * 3];
                eye_pos_right[1] += eye_kps[slice + ind * 3 + 1];
            }
            eye_pos_left[0] /= 8;
            eye_pos_left[1] /= 8;
            eye_pos_right[0] /= 8;
            eye_pos_right[1] /= 8;

            float dx = (float)(0.4 * diag * Math.Sin(theta_x_y_vec_l[1]));
            float dy = (float)(0.4 * diag * Math.Sin(theta_x_y_vec_l[0]));
            Point eye_left_a = new Point(eye_pos_left[1], eye_pos_left[0]);           // start point of the left-eye gaze arrow
            Point eye_left_b = new Point(eye_pos_left[1] + dx, eye_pos_left[0] + dy);  // end point of the left-eye gaze arrow

            Cv2.ArrowedLine(eimg, eye_left_a, eye_left_b, new Scalar(0, 0, 255), 5, LineTypes.AntiAlias, 0, 0.18);
            float yaw_deg_l = (float)(theta_x_y_vec_l[1] * (180 / Math.PI));
            float pitch_deg_l = (float)(-theta_x_y_vec_l[0] * (180 / Math.PI));

            dx = (float)(0.4 * diag * Math.Sin(theta_x_y_vec_r[1]));
            dy = (float)(0.4 * diag * Math.Sin(theta_x_y_vec_r[0]));

            Point eye_right_a = new Point(eye_pos_right[1], eye_pos_right[0]);           // start point of the right-eye gaze arrow
            Point eye_right_b = new Point(eye_pos_right[1] + dx, eye_pos_right[0] + dy);  // end point of the right-eye gaze arrow
            Cv2.ArrowedLine(eimg, eye_right_a, eye_right_b, new Scalar(0, 0, 255), 5, LineTypes.AntiAlias, 0, 0.18);

            float yaw_deg_r = (float)(theta_x_y_vec_r[1] * (180 / Math.PI));
            float pitch_deg_r = (float)(-theta_x_y_vec_r[0] * (180 / Math.PI));

            Cv2.Resize(eimg, eimg, new Size(srcimg.Cols, srcimg.Rows));
            //draw Yaw, Pitch
            string label = String.Format("L-Yaw : {0:f2}", yaw_deg_l);
            sb.AppendLine(label);
            Cv2.PutText(eimg, label, new Point(eimg.Cols - 200, 30), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 2);

            label = String.Format("L-Pitch :{0:f2}", pitch_deg_l);
            sb.AppendLine(label);
            Cv2.PutText(eimg, label, new Point(eimg.Cols - 200, 60), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 2);

            label = String.Format("R-Yaw : {0:f2}", yaw_deg_r);
            sb.AppendLine(label);
            Cv2.PutText(eimg, label, new Point(eimg.Cols - 200, 90), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 2);

            label = String.Format("R-Pitch : {0:f2}", pitch_deg_r);
            sb.AppendLine(label);
            Cv2.PutText(eimg, label, new Point(eimg.Cols - 200, 120), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 2);

            info = sb.ToString();

            return eimg;

        }
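
        // Normalize maps each 8-bit channel into [-1, 1]: value / (255 * 0.5) - 0.5 / 0.5 = value / 127.5 - 1.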

        public Mat Normalize(Mat src)
        {
            Mat[] bgr = src.Split();
            for (int i = 0; i < bgr.Length; ++i)
            {
                bgr[i].ConvertTo(bgr[i], MatType.CV_32FC1, 1.0 / (255.0 * std[i]), (0.0 - mean[i]) / std[i]);
            }

            Cv2.Merge(bgr, src);

            foreach (Mat channel in bgr)
            {
                channel.Dispose();
            }

            return src;
        }

        /// <summary>
        /// The input eye array has shape (481, 3).
        /// The input iris_lms_idx array has length 8.
        /// The output theta_x_y_vec has length 5: theta_x, theta_y, vec[0], vec[1], vec[2].
        /// </summary>
        void angles_and_vec_from_eye(float[] eye, int[] iris_lms_idx, float[] theta_x_y_vec)
        {

            float[] mean = new float[] { 0, 0, 0 };
            for (int i = 0; i < 32; i++)
            {
                mean[0] += eye[i * 3];
                mean[1] += eye[i * 3 + 1];
                mean[2] += eye[i * 3 + 2];
            }
            mean[0] /= 32;
            mean[1] /= 32;
            mean[2] /= 32;

            float[] p_iris = new float[8 * 3];
            for (int i = 0; i < 8; i++)
            {
                int ind = iris_lms_idx[i];
                p_iris[i * 3] = eye[ind * 3] - mean[0];
                p_iris[i * 3 + 1] = eye[ind * 3 + 1] - mean[1];
                p_iris[i * 3 + 2] = eye[ind * 3 + 2] - mean[2];
            }

            float[] mean_p_iris = new float[] { 0, 0, 0 };
            for (int i = 0; i < 8; i++)
            {
                mean_p_iris[0] += p_iris[i * 3];
                mean_p_iris[1] += p_iris[i * 3 + 1];
                mean_p_iris[2] += p_iris[i * 3 + 2];
            }
            mean_p_iris[0] /= 8;
            mean_p_iris[1] /= 8;
            mean_p_iris[2] /= 8;

            float l2norm_p_iris = (float)Math.Sqrt(mean_p_iris[0] * mean_p_iris[0] + mean_p_iris[1] * mean_p_iris[1] + mean_p_iris[2] * mean_p_iris[2]);
            theta_x_y_vec[2] = mean_p_iris[0] / l2norm_p_iris;  // vec[0]
            theta_x_y_vec[3] = mean_p_iris[1] / l2norm_p_iris;  // vec[1]
            theta_x_y_vec[4] = mean_p_iris[2] / l2norm_p_iris;  // vec[2]

            //angles_from_vec
            float x = -theta_x_y_vec[4];
            float y = theta_x_y_vec[3];
            float z = -theta_x_y_vec[2];
            float theta = (float)Math.Atan2(y, x);
            float phi = (float)(Math.Atan2(Math.Sqrt(x * x + y * y), z) - Math.PI * 0.5);
            theta_x_y_vec[0] = phi;
            theta_x_y_vec[1] = theta;
        }

    }
}
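
For reference, the yaw/pitch values that DrawOn prints come from the unit gaze vector computed in angles_and_vec_from_eye. A standalone sketch of just that conversion (GazeAngles is a hypothetical helper, not part of the original project):

using System;

public static class GazeAngles
{
    // Mirrors the formulas in angles_and_vec_from_eye and DrawOn above:
    // remap the axes, take the two atan2 angles, convert to degrees, and negate pitch as DrawOn does.
    public static (double pitchDeg, double yawDeg) FromVector(double vx, double vy, double vz)
    {
        double x = -vz, y = vy, z = -vx;
        double theta = Math.Atan2(y, x);                                       // yaw in radians
        double phi = Math.Atan2(Math.Sqrt(x * x + y * y), z) - Math.PI * 0.5;  // pitch in radians
        return (-phi * 180.0 / Math.PI, theta * 180.0 / Math.PI);
    }
}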

Download

Source code download
