Hand-Rolling a Game Engine with Cherno [22]: CameraController

Prerequisites:

YOTO.h: 

#pragma once

// For use by YOTO client applications

#include "YOTO/Application.h"
#include "YOTO/Layer.h"
#include "YOTO/Log.h"

#include "YOTO/Core/Timestep.h"

#include "YOTO/Input.h"
#include "YOTO/KeyCode.h"
#include "YOTO/MouseButtonCodes.h"
#include "YOTO/OrthographicCameraController.h"

#include "YOTO/ImGui/ImGuiLayer.h"

// Renderer
#include "YOTO/Renderer/Renderer.h"
#include "YOTO/Renderer/RenderCommand.h"

#include "YOTO/Renderer/Buffer.h"
#include "YOTO/Renderer/Shader.h"
#include "YOTO/Renderer/Texture.h"
#include "YOTO/Renderer/VertexArray.h"

#include "YOTO/Renderer/OrthographicCamera.h"


// Entry point
#include "YOTO/EntryPoint.h"

Add the new SetProjection method to the camera:

OrthographicCamera.h:

#pragma once
#include <glm/glm.hpp>
namespace YOTO {
	class OrthographicCamera
	{
	public:
		OrthographicCamera(float left, float right, float bottom, float top);

		void SetProjection(float left, float right, float bottom, float top);

		const glm::vec3& GetPosition()const { return m_Position; }
		void SetPosition(const glm::vec3& position) {
			m_Position = position;
			RecalculateViewMatrix();
		}

		float GetRotation()const { return m_Rotation; }
		void SetRotation(float rotation) {
			m_Rotation = rotation;
			RecalculateViewMatrix();
		}

		const glm::mat4& GetProjectionMatrix()const { return m_ProjectionMatrix; }
		const glm::mat4& GetViewMatrix()const { return m_ViewMatrix; }
		const glm::mat4& GetViewProjectionMatrix()const { return m_ViewProjectionMatrix; }
	private:
		void RecalculateViewMatrix();
	private:
		glm::mat4 m_ProjectionMatrix;
		glm::mat4 m_ViewMatrix;
		glm::mat4 m_ViewProjectionMatrix;
		glm::vec3 m_Position = { 0.0f ,0.0f ,0.0f };
		float m_Rotation = 0.0f;

	};

}

OrthographicCamera.cpp:

#include "ytpch.h"
#include "OrthographicCamera.h"
#include <glm/gtc/matrix_transform.hpp>
namespace YOTO {
	OrthographicCamera::OrthographicCamera(float left, float right, float bottom, float top)
		:m_ProjectionMatrix(glm::ortho(left, right, bottom, top)), m_ViewMatrix(1.0f)
	{
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}

	void OrthographicCamera::SetProjection(float left, float right, float bottom, float top) {
		m_ProjectionMatrix = glm::ortho(left, right, bottom, top);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
	void OrthographicCamera::RecalculateViewMatrix()
	{
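		// Build the camera's world transform from position and rotation, then invert it to get the view matrix.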
		glm::mat4 transform = glm::translate(glm::mat4(1.0f), m_Position) *
			glm::rotate(glm::mat4(1.0f), glm::radians(m_Rotation), glm::vec3(0, 0, 1));
		m_ViewMatrix = glm::inverse(transform);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
}
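A side note on the math (the snippet below is a standalone illustration, not engine code): the four-argument glm::ortho used above is the 2D overload, which behaves like the six-argument version with near = -1 and far = 1, and since GLM multiplies column vectors, the combined matrix is projection * view, applied right-to-left to world-space points.

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

int main()
{
	// Standalone illustration, not engine code.
	glm::mat4 projection = glm::ortho(-1.6f, 1.6f, -0.9f, 0.9f);        // 2D overload: near/far default to -1/1
	glm::mat4 cameraTransform = glm::translate(glm::mat4(1.0f), glm::vec3(0.5f, 0.0f, 0.0f));
	glm::mat4 view = glm::inverse(cameraTransform);                     // view = inverse of the camera's transform
	glm::mat4 viewProjection = projection * view;                       // same order as in the constructor above
	// A point sitting exactly where the camera is ends up at the origin of clip space.
	glm::vec4 clip = viewProjection * glm::vec4(0.5f, 0.0f, 0.0f, 1.0f);
	return 0;
}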

Wrap it in a controller class:

OrthographicCameraController.h: 

#pragma once
#include "YOTO/Renderer/OrthographicCamera.h"
#include"YOTO/Core/Timestep.h"
#include "YOTO/Event/ApplicationEvent.h"
#include "YOTO/Event/MouseEvent.h"
namespace YOTO {
	class OrthographicCameraController {
	public:
		OrthographicCameraController(float aspectRatio, bool rotation = false); // aspectRatio = width / height, e.g. 1280.0f / 720.0f
		void OnUpdate(Timestep ts);
		void OnEvent(Event& e);
		OrthographicCamera& GetCamera() { return m_Camera; }
		const OrthographicCamera& GetCamera() const { return m_Camera; }

	private:
		bool OnMouseScrolled(MouseScrolledEvent &e);
		bool OnWindowResized(WindowResizeEvent& e);
	private:

		float m_AspectRatio; // aspect ratio (width / height)
		float m_ZoomLevel = 1.0f;
		OrthographicCamera m_Camera;

		bool m_Rotation;

		glm::vec3 m_CameraPosition = {0.0f,0.0f,0.0f};
		float m_CameraRotation = 0.0f;
		float m_CameraTranslationSpeed = 1.0f, m_CameraRotationSpeed = 180.0f;
	};
}

 OrthographicCameraController.cpp: 

#include "ytpch.h"
#include "OrthographicCameraController.h"
#include"YOTO/Input.h"
#include <YOTO/KeyCode.h>
namespace YOTO {

	// The camera sees [-aspect * zoom, aspect * zoom] on x and [-zoom, zoom] on y;
	// e.g. a 1280x720 window at zoom level 1.0 covers roughly [-1.78, 1.78] x [-1, 1].
	OrthographicCameraController::OrthographicCameraController(float aspectRatio, bool rotation)
		:m_AspectRatio(aspectRatio), m_Camera(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel), m_Rotation(rotation)
	{
	}

	void OrthographicCameraController::OnUpdate(Timestep ts)
	{
		if (Input::IsKeyPressed(YT_KEY_A)) {
			m_CameraPosition.x -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_D)) {
			m_CameraPosition.x += m_CameraTranslationSpeed * ts;
		}
		if (Input::IsKeyPressed(YT_KEY_S)) {
			m_CameraPosition.y -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_W)) {
			m_CameraPosition.y += m_CameraTranslationSpeed * ts;
		}
		if (m_Rotation) {
			if (Input::IsKeyPressed(YT_KEY_Q)) {
				m_CameraRotation += m_CameraRotationSpeed * ts;
			}
			else if (Input::IsKeyPressed(YT_KEY_E)) {
				m_CameraRotation -= m_CameraRotationSpeed * ts;
			}
			m_Camera.SetRotation(m_CameraRotation);
		}
		m_Camera.SetPosition(m_CameraPosition);
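		// Tie the pan speed to the zoom level so movement feels the same when zoomed in or out.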
		m_CameraTranslationSpeed = m_ZoomLevel;
	}

	
	void OrthographicCameraController::OnEvent(Event& e)
	{
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<MouseScrolledEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnMouseScrolled));
		dispatcher.Dispatch<WindowResizeEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnWindowResized));
	}
	bool OrthographicCameraController::OnMouseScrolled(MouseScrolledEvent& e)
	{
		m_ZoomLevel -= e.GetYOffset()*0.5f;
		m_ZoomLevel = std::max(m_ZoomLevel, 0.25f);
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
	bool OrthographicCameraController::OnWindowResized(WindowResizeEvent& e)
	{
		m_AspectRatio = (float)e.GetWidth()/(float) e.GetHeight();
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
}
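The two Dispatch calls use the YT_BIND_EVENT_FN macro from the existing event system. It is not shown in this post; if your engine does not define it yet, a minimal sketch (an assumption, placed next to EventDispatcher, e.g. in YOTO/Event/Event.h) would be:

#include <functional>

// Minimal sketch (assumption): wraps a member function so the EventDispatcher can invoke it on `this`.
#define YT_BIND_EVENT_FN(fn) std::bind(&fn, this, std::placeholders::_1)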

SandboxApp.cpp:

#include<YOTO.h>
#include "imgui/imgui.h"
#include<stdio.h>
#include <glm/gtc/matrix_transform.hpp>
#include <Platform/OpenGL/OpenGLShader.h>
#include <glm/gtc/type_ptr.hpp>


class ExampleLayer:public YOTO::Layer
{
public:
	ExampleLayer()
	:Layer("Example"),  m_CameraController(1280.0f/720.0f,true){
		uint32_t indices[3] = { 0,1,2 };
		float vertices[3 * 7] = {
			-0.5f,-0.5f,0.0f, 0.8f,0.2f,0.8f,1.0f,
			0.5f,-0.5f,0.0f,  0.2f,0.3f,0.8f,1.0f,
			0.0f,0.5f,0.0f,   0.8f,0.8f,0.2f,1.0f,
		};

		m_VertexArray.reset(YOTO::VertexArray::Create());


		YOTO::Ref<YOTO::VertexBuffer> m_VertexBuffer;
		m_VertexBuffer.reset(YOTO::VertexBuffer::Create(vertices, sizeof(vertices)));

		{
			YOTO::BufferLayout setlayout = {
				{ YOTO::ShaderDataType::Float3, "a_Position" },
				{ YOTO::ShaderDataType::Float4, "a_Color" }
			};
			m_VertexBuffer->SetLayout(setlayout);
		}

		m_VertexArray->AddVertexBuffer(m_VertexBuffer);


		YOTO::Ref<YOTO::IndexBuffer>m_IndexBuffer;
		m_IndexBuffer.reset(YOTO::IndexBuffer::Create(indices, sizeof(indices) / sizeof(uint32_t)));

		m_VertexArray->AddIndexBuffer(m_IndexBuffer);

		std::string vertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		layout(location = 1) in vec4 a_Color;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;
		out vec3 v_Position;
		out vec4 v_Color;
		void main(){
		v_Position=a_Position;
		v_Color=a_Color;
		gl_Position =u_ViewProjection *u_Transform* vec4( a_Position,1.0);
		}
		)";
		//绘制颜色
		std::string fragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;
		in vec3 v_Position;
		in vec4 v_Color;
		void main(){
		color=vec4(v_Color);
		}
		)";
		m_Shader=(YOTO::Shader::Create("VertexPosColor", vertexSource, fragmentSource));

		// --- Square (test) ---

		m_SquareVA.reset(YOTO::VertexArray::Create());

		float squareVertices[5 * 4] = {
			-0.5f,-0.5f,0.0f, 0.0f,0.0f,
			0.5f,-0.5f,0.0f,  1.0f,0.0f,
			0.5f,0.5f,0.0f,   1.0f,1.0f,
			-0.5f,0.5f,0.0f,  0.0f,1.0f,
		};
		YOTO::Ref<YOTO::VertexBuffer> squareVB;
		squareVB.reset(YOTO::VertexBuffer::Create(squareVertices, sizeof(squareVertices)));
		squareVB->SetLayout({
			{YOTO::ShaderDataType::Float3,"a_Position"},
				{YOTO::ShaderDataType::Float2,"a_TexCoord"}
			});
		m_SquareVA->AddVertexBuffer(squareVB);
		uint32_t squareIndices[6] = { 0,1,2,2,3,0 };
		YOTO::Ref<YOTO::IndexBuffer> squareIB;

		squareIB.reset((YOTO::IndexBuffer::Create(squareIndices, sizeof(squareIndices) / sizeof(uint32_t))));

		m_SquareVA->AddIndexBuffer(squareIB);

		// Flat-color shader for the square grid:
		std::string BlueShaderVertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;

		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;

		out vec3 v_Position;

		void main(){
			v_Position = a_Position;
			gl_Position = u_ViewProjection * u_Transform * vec4(a_Position, 1.0);
		}
		)";
		// Fragment shader: output a uniform flat color
		std::string BlueShaderFragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;

		in vec3 v_Position;
		uniform vec3 u_Color;

		void main(){
			color = vec4(u_Color, 1.0);
		}
		)";
		m_BlueShader = YOTO::Shader::Create("FlatColor", BlueShaderVertexSource, BlueShaderFragmentSource);

		auto textureShader = m_ShaderLibrary.Load("assets/shaders/Texture.glsl");
		m_Texture = YOTO::Texture2D::Create("assets/textures/Checkerboard.png");
		m_ChernoLogo = YOTO::Texture2D::Create("assets/textures/ChernoLogo.png");
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->Bind();
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->UploadUniformInt("u_Texture", 0);
	}
	void OnImGuiRender() override {
		ImGui::Begin("设置");
		ImGui::ColorEdit3("正方形颜色", glm::value_ptr(m_SquareColor));
		ImGui::End();
	}
	void OnUpdate(YOTO::Timestep ts) override {
		// Update
		m_CameraController.OnUpdate(ts);
		//YT_CLIENT_TRACE("delta time {0}s ({1}ms)", ts.GetSeconds(), ts.GetMilliseconds());

		// Render
		YOTO::RenderCommand::SetClearColor({ 0.2f, 0.2f, 0.2f, 1.0f });
		YOTO::RenderCommand::Clear();

		YOTO::Renderer::BeginScene(m_CameraController.GetCamera());
		{
			static glm::mat4 scale = glm::scale(glm::mat4(1.0f), glm::vec3(0.1f)); 
			glm::vec4  redColor(0.8f, 0.3f, 0.3f, 1.0f);
			glm::vec4  blueColor(0.2f, 0.3f, 0.8f, 1.0f);

	/*		YOTO::MaterialRef material = new YOTO::MaterialRef(m_FlatColorShader);
			YOTO::MaterialInstaceRef mi = new YOTO::MaterialInstaceRef(material);
			mi.setValue("u_Color",redColor);
			mi.setTexture("u_AlbedoMap", texture);
			squreMesh->SetMaterial(mi);*/

			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->Bind();
			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->UploadUniformFloat3("u_Color",m_SquareColor);
			for (int y = 0; y < 20; y++) {
				for (int x = 0; x <20; x++)
				{

					glm::vec3 pos(x * 0.105f,y* 0.105f, 0.0);
					glm::mat4 transform = glm::translate(glm::mat4(1.0f), pos) * scale;
				/*	if (x % 2 == 0) {
						m_BlueShader->UploadUniformFloat4("u_Color", redColor);
					}
					else {
						m_BlueShader->UploadUniformFloat4("u_Color", blueColor);
					}*/
		

					YOTO::Renderer::Submit(m_BlueShader, m_SquareVA, transform);
				}
			}
			
			auto textureShader = m_ShaderLibrary.Get("Texture");
			m_Texture->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));
			m_ChernoLogo->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));

			//YOTO::Renderer::Submit(m_Shader, m_VertexArray);


		
		}	
		YOTO::Renderer::EndScene();

		
		//YOTO::Renderer3D::BeginScene(m_Scene);
		//YOTO::Renderer2D::BeginScene(m_Scene);
	}
	void OnEvent(YOTO::Event& e) override {
		m_CameraController.OnEvent(e);
		/*if (e.GetEventType() == YOTO::EventType::KeyPressed) {
			YOTO::KeyPressedEvent& keyEvent = (YOTO::KeyPressedEvent&)e;
			YT_CLIENT_TRACE("ExampleLayer:{0}", (char)keyEvent.GetKeyCode());
			if (keyEvent.GetKeyCode() == YT_KEY_TAB) {
				YT_CLIENT_INFO("ExampleLayer OnEvent: TAB pressed");
			}
		}*/
		//YT_CLIENT_TRACE("SandBoxApp: test event {0}", e);
	}


private:
	YOTO::ShaderLibrary m_ShaderLibrary;
	YOTO::Ref<YOTO::Shader> m_Shader;
	YOTO::Ref<YOTO::VertexArray> m_VertexArray;


	YOTO::Ref<YOTO::Shader> m_BlueShader;
	YOTO::Ref<YOTO::VertexArray> m_SquareVA;
	
	YOTO::Ref<YOTO::Texture2D> m_Texture,m_ChernoLogo;

	YOTO::OrthographicCameraController m_CameraController;
	glm::vec3 m_SquareColor = { 0.2f,0.3f,0.7f };

};


class Sandbox:public YOTO::Application
{
public:
	Sandbox(){
		PushLayer(new ExampleLayer());
		//PushLayer(new YOTO::ImGuiLayer());
	}
	~Sandbox() {

	}

private:

};

YOTO::Application* YOTO::CreateApplication() {
	printf("helloworld");
	return new Sandbox();
}
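For the uniforms to line up, Renderer::BeginScene / Submit (implemented in the earlier renderer episodes) are expected to upload u_ViewProjection from the camera and u_Transform per draw call. A rough sketch of that flow, not the exact engine code, and assuming your OpenGLShader exposes an UploadUniformMat4 alongside the UploadUniformInt / UploadUniformFloat3 used above:

	// Rough sketch, not the exact engine code: how the controller's camera feeds the shaders above.
	void Renderer::BeginScene(OrthographicCamera& camera)
	{
		s_SceneData->ViewProjectionMatrix = camera.GetViewProjectionMatrix();
	}

	void Renderer::Submit(const Ref<Shader>& shader, const Ref<VertexArray>& vertexArray, const glm::mat4& transform)
	{
		shader->Bind();
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_ViewProjection", s_SceneData->ViewProjectionMatrix);
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_Transform", transform);

		vertexArray->Bind();
		RenderCommand::DrawIndexed(vertexArray);
	}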

Test:

Cool. Just a quick, casual write-up for this one.
