Following Cherno: Building a Game Engine by Hand [22] CameraController & Resize

Prerequisites:

YOTO.h: 

#pragma once

// For use by YOTO client applications

#include "YOTO/Application.h"
#include"YOTO/Layer.h"
#include "YOTO/Log.h"

#include"YOTO/Core/Timestep.h"

#include"YOTO/Input.h"
#include"YOTO/KeyCode.h"
#include"YOTO/MouseButtonCodes.h"
#include "YOTO/OrthographicCameraController.h"

#include"YOTO/ImGui/ImGuiLayer.h"

//Renderer
#include"YOTO/Renderer/Renderer.h"
#include"YOTO/Renderer/RenderCommand.h"

#include"YOTO/Renderer/Buffer.h"
#include"YOTO/Renderer/Shader.h"
#include"YOTO/Renderer/Texture.h"
#include"YOTO/Renderer/VertexArray.h"

#include"YOTO/Renderer/OrthographicCamera.h"



// Entry point
#include"YOTO/EntryPoint.h"

Add a SetProjection method:

OrthographicCamera.h:

#pragma once
#include <glm/glm.hpp>
namespace YOTO {
	class OrthographicCamera
	{
	public:
		OrthographicCamera(float left, float right, float bottom, float top);

		void SetProjection(float left, float right, float bottom, float top);

		const glm::vec3& GetPosition()const { return m_Position; }
		void SetPosition(const glm::vec3& position) {
			m_Position = position;
			RecalculateViewMatrix();
		}

		float GetRotation()const { return m_Rotation; }
		void SetRotation(float rotation) {
			m_Rotation = rotation;
			RecalculateViewMatrix();
		}

		const glm::mat4& GetProjectionMatrix()const { return m_ProjectionMatrix; }
		const glm::mat4& GetViewMatrix()const { return m_ViewMatrix; }
		const glm::mat4& GetViewProjectionMatrix()const { return m_ViewProjectionMatrix; }
	private:
		void RecalculateViewMatrix();
	private:
		glm::mat4 m_ProjectionMatrix;
		glm::mat4 m_ViewMatrix;
		glm::mat4 m_ViewProjectionMatrix;
		glm::vec3 m_Position = { 0.0f ,0.0f ,0.0f };
		float m_Rotation = 0.0f;

	};

}

OrthographicCamera.cpp:

#include "ytpch.h"
#include "OrthographicCamera.h"
#include <glm/gtc/matrix_transform.hpp>
namespace YOTO {
	OrthographicCamera::OrthographicCamera(float left, float right, float bottom, float top)
		:m_ProjectionMatrix(glm::ortho(left, right, bottom, top)), m_ViewMatrix(1.0f)
	{
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}

	void OrthographicCamera::SetProjection(float left, float right, float bottom, float top) {
		m_ProjectionMatrix = glm::ortho(left, right, bottom, top);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
	void OrthographicCamera::RecalculateViewMatrix()
	{
		glm::mat4 transform = glm::translate(glm::mat4(1.0f), m_Position) *
			glm::rotate(glm::mat4(1.0f), glm::radians(m_Rotation), glm::vec3(0, 0, 1));
		m_ViewMatrix = glm::inverse(transform);
		m_ViewProjectionMatrix = m_ProjectionMatrix * m_ViewMatrix;
	}
}
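Why the new SetProjection method matters: both zooming and window resizing simply rebuild the orthographic projection from an aspect ratio and a zoom level. A minimal sketch (the RebuildProjection helper below is hypothetical; only the SetProjection call itself comes from the listing above):

#include "YOTO/Renderer/OrthographicCamera.h"

// Hypothetical helper: rebuild the projection whenever the aspect ratio or zoom level changes.
static void RebuildProjection(YOTO::OrthographicCamera& camera, float aspectRatio, float zoomLevel)
{
	// The visible height is fixed at 2 * zoomLevel; the width additionally scales with the aspect ratio.
	camera.SetProjection(-aspectRatio * zoomLevel, aspectRatio * zoomLevel, -zoomLevel, zoomLevel);
}

This is exactly the call the controller below makes in OnMouseScrolled and OnWindowResized.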

Wrap the camera in a controller class:

OrthographicCameraController.h: 

#pragma once
#include "YOTO/Renderer/OrthographicCamera.h"
#include"YOTO/Core/Timestep.h"
#include "YOTO/Event/ApplicationEvent.h"
#include "YOTO/Event/MouseEvent.h"
namespace YOTO {
	class OrthographicCameraController {
	public:
		OrthographicCameraController(float aspectRatio, bool rotation=false );//0,1280
		void OnUpdate(Timestep ts);
		void OnEvent(Event& e);
		OrthographicCamera& GetCamera() {
			return m_Camera;
		}
		const OrthographicCamera& GetCamera() const {
			return m_Camera;
		}

	private:
		bool OnMouseScrolled(MouseScrolledEvent &e);
		bool OnWindowResized(WindowResizeEvent& e);
	private:

		float m_AspectRatio; // aspect ratio
		float m_ZoomLevel = 1.0f;
		OrthographicCamera m_Camera;

		bool m_Rotation;

		glm::vec3 m_CameraPosition = {0.0f,0.0f,0.0f};
		float m_CameraRotation = 0.0f;
		float m_CameraTranslationSpeed = 1.0f, m_CameraRotationSpeed = 180.0f;
	};
}

 OrthographicCameraController.cpp: 

#include "ytpch.h"
#include "OrthographicCameraController.h"
#include"YOTO/Input.h"
#include <YOTO/KeyCode.h>
namespace YOTO {

	OrthographicCameraController::OrthographicCameraController(float aspectRatio, bool rotation)
		:m_AspectRatio(aspectRatio),m_Camera(-m_AspectRatio*m_ZoomLevel,m_AspectRatio*m_ZoomLevel,-m_ZoomLevel,m_ZoomLevel),m_Rotation(rotation)
	{

	}

	 
	void OrthographicCameraController::OnUpdate(Timestep ts)
	{
		if (Input::IsKeyPressed(YT_KEY_A)) {
			m_CameraPosition.x -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_D)) {
			m_CameraPosition.x += m_CameraTranslationSpeed * ts;
		}
		if (Input::IsKeyPressed(YT_KEY_S)) {
			m_CameraPosition.y -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_W)) {
			m_CameraPosition.y += m_CameraTranslationSpeed * ts;
		}
		if (m_Rotation) {
			if (Input::IsKeyPressed(YT_KEY_Q)) {
				m_CameraRotation += m_CameraRotationSpeed * ts;
			}
			else if (Input::IsKeyPressed(YT_KEY_E)) {
				m_CameraRotation -= m_CameraRotationSpeed * ts;
			}
			m_Camera.SetRotation(m_CameraRotation);
		}
		m_Camera.SetPosition(m_CameraPosition);
		// Scale the pan speed with the zoom level so movement stays proportional when zoomed out
		m_CameraTranslationSpeed = m_ZoomLevel;
	}

	
	void OrthographicCameraController::OnEvent(Event& e)
	{
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<MouseScrolledEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnMouseScrolled));
		dispatcher.Dispatch<WindowResizeEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnWindowResized));
	}
	bool OrthographicCameraController::OnMouseScrolled(MouseScrolledEvent& e)
	{
		m_ZoomLevel -= e.GetYOffset()*0.5f;
		m_ZoomLevel = std::max(m_ZoomLevel, 0.25f);
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
	bool OrthographicCameraController::OnWindowResized(WindowResizeEvent& e)
	{
		m_AspectRatio = (float)e.GetWidth()/(float) e.GetHeight();
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
}
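A quick sanity check on OnWindowResized with made-up numbers: resizing the window to 1600x900 at m_ZoomLevel = 1.0 gives an aspect ratio of roughly 1.78, so the projection becomes about [-1.78, 1.78] horizontally and [-1, 1] vertically; the visible height stays fixed and only the width follows the window. A tiny standalone check:

#include <cstdio>

int main()
{
	// Hypothetical resize to 1600x900 with zoom level 1.0
	float width = 1600.0f, height = 900.0f, zoomLevel = 1.0f;
	float aspectRatio = width / height; // ~1.778
	std::printf("left=%.3f right=%.3f bottom=%.3f top=%.3f\n",
		-aspectRatio * zoomLevel, aspectRatio * zoomLevel, -zoomLevel, zoomLevel);
	// prints: left=-1.778 right=1.778 bottom=-1.000 top=1.000
	return 0;
}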

SandboxApp.cpp:

#include<YOTO.h>
#include "imgui/imgui.h"
#include<stdio.h>
#include <glm/gtc/matrix_transform.hpp>
#include <Platform/OpenGL/OpenGLShader.h>
#include <glm/gtc/type_ptr.hpp>


class ExampleLayer:public YOTO::Layer
{
public:
	ExampleLayer()
	:Layer("Example"),  m_CameraController(1280.0f/720.0f,true){
		uint32_t indices[3] = { 0,1,2 };
		float vertices[3 * 7] = {
			-0.5f,-0.5f,0.0f, 0.8f,0.2f,0.8f,1.0f,
			0.5f,-0.5f,0.0f,  0.2f,0.3f,0.8f,1.0f,
			0.0f,0.5f,0.0f,   0.8f,0.8f,0.2f,1.0f,
		};

		m_VertexArray.reset(YOTO::VertexArray::Create());


		YOTO::Ref<YOTO::VertexBuffer> m_VertexBuffer;
		m_VertexBuffer.reset(YOTO::VertexBuffer::Create(vertices, sizeof(vertices)));

		{
			YOTO::BufferLayout setlayout = {
				{YOTO::ShaderDataType::Float3, "a_Position"},
				{YOTO::ShaderDataType::Float4, "a_Color"}
			};
			m_VertexBuffer->SetLayout(setlayout);
		}

		m_VertexArray->AddVertexBuffer(m_VertexBuffer);


		YOTO::Ref<YOTO::IndexBuffer>m_IndexBuffer;
		m_IndexBuffer.reset(YOTO::IndexBuffer::Create(indices, sizeof(indices) / sizeof(uint32_t)));

		m_VertexArray->AddIndexBuffer(m_IndexBuffer);

		std::string vertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		layout(location = 1) in vec4 a_Color;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;
		out vec3 v_Position;
		out vec4 v_Color;
		void main(){
		v_Position=a_Position;
		v_Color=a_Color;
		gl_Position =u_ViewProjection *u_Transform* vec4( a_Position,1.0);
		}
		)";
		// Fragment shader: output the vertex color
		std::string fragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;
		in vec3 v_Position;
		in vec4 v_Color;
		void main(){
		color=vec4(v_Color);
		}
		)";
		m_Shader=(YOTO::Shader::Create("VertexPosColor", vertexSource, fragmentSource));

		/// Test ///

		m_SquareVA.reset(YOTO::VertexArray::Create());

		float squareVertices[5 * 4] = {
			-0.5f,-0.5f,0.0f, 0.0f,0.0f,
			0.5f,-0.5f,0.0f,  1.0f,0.0f,
			0.5f,0.5f,0.0f,   1.0f,1.0f,
			-0.5f,0.5f,0.0f,  0.0f,1.0f,
		};
		YOTO::Ref<YOTO::VertexBuffer> squareVB;
		squareVB.reset(YOTO::VertexBuffer::Create(squareVertices, sizeof(squareVertices)));
		squareVB->SetLayout({
			{YOTO::ShaderDataType::Float3, "a_Position"},
			{YOTO::ShaderDataType::Float2, "a_TexCoord"}
		});
		m_SquareVA->AddVertexBuffer(squareVB);
		uint32_t squareIndices[6] = { 0,1,2,2,3,0 };
		YOTO::Ref<YOTO::IndexBuffer> squareIB;

		squareIB.reset((YOTO::IndexBuffer::Create(squareIndices, sizeof(squareIndices) / sizeof(uint32_t))));

		m_SquareVA->AddIndexBuffer(squareIB);

		// Test:
		std::string BlueShaderVertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;

		out vec3 v_Position;
		void main(){
		v_Position=a_Position;
		gl_Position =u_ViewProjection*u_Transform*vec4( a_Position,1.0);
		}
		)";
		// Fragment shader: flat color
		std::string BlueShaderFragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;

		in vec3 v_Position;
		uniform vec3 u_Color;
		void main(){
		color=vec4(u_Color,1.0);
		}
		)";
		m_BlueShader=(YOTO::Shader::Create("FlatColor", BlueShaderVertexSource, BlueShaderFragmentSource));

		auto textureShader = m_ShaderLibrary.Load("assets/shaders/Texture.glsl");
		m_Texture=YOTO::Texture2D::Create("assets/textures/Checkerboard.png");
		m_ChernoLogo= YOTO::Texture2D::Create("assets/textures/ChernoLogo.png");
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->Bind();
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->UploadUniformInt("u_Texture", 0);
	}
	void OnImGuiRender() override {
		ImGui::Begin("Settings");
		ImGui::ColorEdit3("Square Color", glm::value_ptr(m_SquareColor));
		ImGui::End();
	}
	void OnUpdate(YOTO::Timestep ts)override {
		//update
		m_CameraController.OnUpdate(ts);
		//YT_CLIENT_TRACE("delta time {0}s ({1}ms)", ts.GetSeconds(), ts.GetMilliseconds());
	

	
		//Render
		YOTO::RenderCommand::SetClearColor({ 0.2f, 0.2f, 0.2f, 1.0f });
		YOTO::RenderCommand::Clear();

		YOTO::Renderer::BeginScene(m_CameraController.GetCamera());
		{
			static glm::mat4 scale = glm::scale(glm::mat4(1.0f), glm::vec3(0.1f)); 
			glm::vec4  redColor(0.8f, 0.3f, 0.3f, 1.0f);
			glm::vec4  blueColor(0.2f, 0.3f, 0.8f, 1.0f);

	/*		YOTO::MaterialRef material = new YOTO::MaterialRef(m_FlatColorShader);
			YOTO::MaterialInstaceRef mi = new YOTO::MaterialInstaceRef(material);
			mi.setValue("u_Color",redColor);
			mi.setTexture("u_AlbedoMap", texture);
			squreMesh->SetMaterial(mi);*/

			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->Bind();
			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->UploadUniformFloat3("u_Color",m_SquareColor);
			for (int y = 0; y < 20; y++) {
				for (int x = 0; x <20; x++)
				{

					glm::vec3 pos(x * 0.105f,y* 0.105f, 0.0);
					glm::mat4 transform = glm::translate(glm::mat4(1.0f), pos) * scale;
				/*	if (x % 2 == 0) {
						m_BlueShader->UploadUniformFloat4("u_Color", redColor);
					}
					else {
						m_BlueShader->UploadUniformFloat4("u_Color", blueColor);
					}*/
		

					YOTO::Renderer::Submit(m_BlueShader, m_SquareVA, transform);
				}
			}
			
			auto textureShader = m_ShaderLibrary.Get("Texture");
			m_Texture->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));
			m_ChernoLogo->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));

			//YOTO::Renderer::Submit(m_Shader, m_VertexArray);


		
		}	
		YOTO::Renderer::EndScene();

		
		//YOTO::Renderer3D::BeginScene(m_Scene);
		//YOTO::Renderer2D::BeginScene(m_Scene);
	}
	void OnEvent(YOTO::Event& e)override {

		m_CameraController.OnEvent(e);
		/*if (event.GetEventType() == YOTO::EventType::KeyPressed) {
		YOTO:: KeyPressedEvent& e = (YOTO::KeyPressedEvent&)event;
		YT_CLIENT_TRACE("ExampleLayer:{0}",(char)e.GetKeyCode());
		if (e.GetKeyCode()==YT_KEY_TAB) {
			YT_CLIENT_INFO("ExampleLayerOnEvent: TAB pressed");
		}}*/
		//YT_CLIENT_TRACE("SandBoxApp: test event {0}", event);




	}


private:
	YOTO::ShaderLibrary m_ShaderLibrary;
	YOTO::Ref<YOTO::Shader> m_Shader;
	YOTO::Ref<YOTO::VertexArray> m_VertexArray;


	YOTO::Ref<YOTO::Shader> m_BlueShader;
	YOTO::Ref<YOTO::VertexArray> m_SquareVA;
	
	YOTO::Ref<YOTO::Texture2D> m_Texture,m_ChernoLogo;

	YOTO::OrthographicCameraController m_CameraController;
	glm::vec3 m_SquareColor = { 0.2f,0.3f,0.7f };

};


class Sandbox:public YOTO::Application
{
public:
	Sandbox(){
		PushLayer(new ExampleLayer());
		//PushLayer(new YOTO::ImGuiLayer());
	}
	~Sandbox() {

	}

private:

};

YOTO::Application* YOTO::CreateApplication() {
	printf("helloworld");
	return new Sandbox();
}

Test:

 cool

Changes:

Add a viewport-update path to the renderer: on a WindowResize event, Application::OnWindowResize passes the new size to Renderer::OnWindowResize, which calls RenderCommand::SetViewport and ultimately glViewport.

Renderer.h:

#pragma once
#include"RenderCommand.h"
#include "OrthographicCamera.h"
#include"Shader.h"
namespace YOTO {

	class Renderer {
	public:


		static void Init();
		static void OnWindowResize(uint32_t width,uint32_t height);
		static void BeginScene(OrthographicCamera& camera);
		static void EndScene();

		static void Submit(const Ref<Shader>& shader, const Ref<VertexArray>& vertexArray,const glm::mat4&transform = glm::mat4(1.0f));
		
		inline static RendererAPI::API GetAPI() {
			return RendererAPI::GetAPI();
		}
	private:
		struct SceneData {
			glm::mat4 ViewProjectionMatrix;
		};
		static SceneData* m_SceneData;
	};

}

Renderer.cpp:

#include"ytpch.h"
#include"Renderer.h"
#include <Platform/OpenGL/OpenGLShader.h>
namespace YOTO {
	Renderer::SceneData* Renderer::m_SceneData = new Renderer::SceneData;
	void Renderer::Init()
	{
		RenderCommand::Init();
	}
	void Renderer::OnWindowResize(uint32_t width, uint32_t height)
	{
		RenderCommand::SetViewport(0, 0, width, height);
	}
	void Renderer::BeginScene(OrthographicCamera& camera)
	{
		m_SceneData->ViewProjectionMatrix = camera.GetViewProjectionMatrix();
	}
	void Renderer::EndScene()
	{
	}
	void Renderer::Submit(const Ref<Shader>& shader, const Ref<VertexArray>& vertexArray, const glm::mat4& transform)
	{
		shader->Bind();
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_ViewProjection", m_SceneData->ViewProjectionMatrix);
		std::dynamic_pointer_cast<OpenGLShader>(shader)->UploadUniformMat4("u_Transform", transform);
	/*	mi.Bind();*/

		vertexArray->Bind();
		RenderCommand::DrawIndexed(vertexArray);
	}
}

RenderCommand.h:

#pragma once
#include"RendererAPI.h"
namespace YOTO {
	class RenderCommand
	{
	public:
		inline static void Init() {
			s_RendererAPI->Init();
		}
		inline static void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) {
			s_RendererAPI->SetViewport(x,y,width,height);
		}

		inline static void SetClearColor(const glm::vec4& color) {
			s_RendererAPI->SetClearColor(color);
		}
		inline static void Clear() {
			s_RendererAPI->Clear();
		}
		inline static void DrawIndexed(const Ref<VertexArray>& vertexArray) {
			s_RendererAPI->DrawIndexed(vertexArray);
		}
		
	private:
		static RendererAPI* s_RendererAPI;

	};

}

RendererAPI.h:

#pragma once
#include<glm/glm.hpp>
#include "VertexArray.h"
namespace YOTO {
	class RendererAPI
	{
	public:
		enum class API {
			None = 0,
			OpenGL = 1
		};
	public:
		virtual void Init() = 0;
		virtual void SetClearColor(const glm::vec4& color)=0;
		virtual void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) = 0;
		virtual void Clear() = 0;
		virtual void DrawIndexed(const Ref<VertexArray>& vertexArray)=0;

		inline static API GetAPI() { return s_API; }
	private:
		static API s_API;
	};
}


OpenGLRendererAPI.h:

#pragma once
#include"YOTO/Renderer/RendererAPI.h"
namespace YOTO {
	class OpenGLRendererAPI:public RendererAPI
	{
	public:
		virtual void Init()override;
		virtual void SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height) override;
		virtual void SetClearColor(const glm::vec4& color)override;
		virtual void Clear()override;
		virtual void DrawIndexed(const Ref<VertexArray>& vertexArray) override;
	};
}


OpenGLRendererAPI.cpp:

#include "ytpch.h"
#include "OpenGLRendererAPI.h"
#include <glad/glad.h>
namespace YOTO {
	void OpenGLRendererAPI::Init()
	{
		// Enable blending
		glEnable(GL_BLEND);
		// Set the blend function (standard alpha blending)
		glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
	}
	void OpenGLRendererAPI::SetViewport(uint32_t x, uint32_t y, uint32_t width, uint32_t height)
	{
		glViewport(x, y, width, height);
	}
	void OpenGLRendererAPI::SetClearColor(const glm::vec4& color)
	{
		glClearColor(color.r, color.g, color.b, color.a);
	}
	void OpenGLRendererAPI::Clear()
	{
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	}
	void OpenGLRendererAPI::DrawIndexed(const Ref<VertexArray>& vertexArray)
	{
		glDrawElements(GL_TRIANGLES, vertexArray->GetIndexBuffer()->GetCount(), GL_UNSIGNED_INT, nullptr);
	}
}
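For reference, the blend state set in Init() combines each incoming fragment with the framebuffer as out = src.rgb * src.a + dst.rgb * (1 - src.a), which is what lets textures with an alpha channel (like the logo) draw on top of what is already there. A tiny numeric check with made-up values:

#include <cstdio>

int main()
{
	// Half-transparent red fragment over the dark grey clear color (0.2)
	float srcR = 1.0f, srcA = 0.5f, dstR = 0.2f;
	float outR = srcR * srcA + dstR * (1.0f - srcA); // GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA
	std::printf("blended red channel = %.2f\n", outR); // 0.60
	return 0;
}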

OrthographicCameraController.h (add GetZoomLevel / SetZoomLevel):

#pragma once
#include "YOTO/Renderer/OrthographicCamera.h"
#include"YOTO/Core/Timestep.h"
#include "YOTO/Event/ApplicationEvent.h"
#include "YOTO/Event/MouseEvent.h"
namespace YOTO {
	class OrthographicCameraController {
	public:
		OrthographicCameraController(float aspectRatio, bool rotation=false );//0,1280
		void OnUpdate(Timestep ts);
		void OnEvent(Event& e);
		OrthographicCamera& GetCamera() {
			return m_Camera;
		}
		const OrthographicCamera& GetCamera() const {
			return m_Camera;
		}
		float  GetZoomLevel() { return m_ZoomLevel; }
		void SetZoomLevel(float level) { m_ZoomLevel = level; }

	private:
		bool OnMouseScrolled(MouseScrolledEvent &e);
		bool OnWindowResized(WindowResizeEvent& e);
	private:

		float m_AspectRatio; // aspect ratio
		float m_ZoomLevel = 1.0f;
		OrthographicCamera m_Camera;

		bool m_Rotation;

		glm::vec3 m_CameraPosition = {0.0f,0.0f,0.0f};
		float m_CameraRotation = 0.0f;
		float m_CameraTranslationSpeed = 1.0f, m_CameraRotationSpeed = 180.0f;
	};
}

OrthographicCameraController.cpp (unchanged):

#include "ytpch.h"
#include "OrthographicCameraController.h"
#include"YOTO/Input.h"
#include <YOTO/KeyCode.h>
namespace YOTO {

	OrthographicCameraController::OrthographicCameraController(float aspectRatio, bool rotation)
		:m_AspectRatio(aspectRatio),m_Camera(-m_AspectRatio*m_ZoomLevel,m_AspectRatio*m_ZoomLevel,-m_ZoomLevel,m_ZoomLevel),m_Rotation(rotation)
	{

	}

	 
	void OrthographicCameraController::OnUpdate(Timestep ts)
	{
		if (Input::IsKeyPressed(YT_KEY_A)) {
			m_CameraPosition.x -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_D)) {
			m_CameraPosition.x += m_CameraTranslationSpeed * ts;
		}
		if (Input::IsKeyPressed(YT_KEY_S)) {
			m_CameraPosition.y -= m_CameraTranslationSpeed * ts;
		}
		else if (Input::IsKeyPressed(YT_KEY_W)) {
			m_CameraPosition.y += m_CameraTranslationSpeed * ts;
		}
		if (m_Rotation) {
			if (Input::IsKeyPressed(YT_KEY_Q)) {
				m_CameraRotation += m_CameraRotationSpeed * ts;
			}
			else if (Input::IsKeyPressed(YT_KEY_E)) {
				m_CameraRotation -= m_CameraRotationSpeed * ts;
			}
			m_Camera.SetRotation(m_CameraRotation);
		}
		m_Camera.SetPosition(m_CameraPosition);
		// Scale the pan speed with the zoom level so movement stays proportional when zoomed out
		m_CameraTranslationSpeed = m_ZoomLevel;
	}

	
	void OrthographicCameraController::OnEvent(Event& e)
	{
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<MouseScrolledEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnMouseScrolled));
		dispatcher.Dispatch<WindowResizeEvent>(YT_BIND_EVENT_FN(OrthographicCameraController::OnWindowResized));
	}
	bool OrthographicCameraController::OnMouseScrolled(MouseScrolledEvent& e)
	{
		m_ZoomLevel -= e.GetYOffset()*0.5f;
		m_ZoomLevel = std::max(m_ZoomLevel, 0.25f);
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
	bool OrthographicCameraController::OnWindowResized(WindowResizeEvent& e)
	{
		m_AspectRatio = (float)e.GetWidth()/(float) e.GetHeight();
		m_Camera.SetProjection(-m_AspectRatio * m_ZoomLevel, m_AspectRatio * m_ZoomLevel, -m_ZoomLevel, m_ZoomLevel);
		return false;
	}
}

Application.h:

#pragma once
#include"Core.h"
#include"Event/Event.h"
#include"Event/ApplicationEvent.h"
#include "YOTO/Window.h"
#include"YOTO/LayerStack.h"
#include"YOTO/ImGui/ImGuiLayer.h"

#include "YOTO/Core/Timestep.h"
#include "YOTO/Renderer/OrthographicCamera.h"
namespace YOTO {
	class YOTO_API Application
	{
	public:
		Application();
		virtual ~Application();
		void Run();
		void OnEvent(Event& e);
		void PushLayer(Layer* layer);
		void PushOverlay(Layer* layer);

		inline static Application& Get() { return *s_Instance; }
		inline Window& GetWindow() { return *m_Window; }
	private:
		bool  OnWindowClosed(WindowCloseEvent& e);
		bool  OnWindowResize(WindowResizeEvent& e);
	private:
		std::unique_ptr<Window>  m_Window;
		ImGuiLayer* m_ImGuiLayer;
		bool m_Running = true;
		bool m_Minimized = false;
		LayerStack m_LayerStack;
		Timestep m_Timestep;
		float m_LastFrameTime = 0.0f;
		 

	
	private:
		static Application* s_Instance;
	};
	// Defined by the client
	Application* CreateApplication();
}

Application.cpp: 

#include"ytpch.h"
#include "Application.h"

#include"Log.h"
#include "YOTO/Renderer/Renderer.h"
#include"Input.h"
#include <GLFW/glfw3.h>


namespace YOTO {
#define BIND_EVENT_FN(x) std::bind(&x, this, std::placeholders::_1)

	Application* Application::s_Instance = nullptr;

	Application::Application()
	{

		YT_CORE_ASSERT(!s_Instance, "Application already exists!");
		s_Instance = this;
		// Owned through a smart pointer
		m_Window = std::unique_ptr<Window>(Window::Creat());
		// Set the event callback
		m_Window->SetEventCallback(BIND_EVENT_FN(Application::OnEvent));

		m_Window->SetVSync(false);

		Renderer::Init();

		// Create the ImGui layer and push it as an overlay so it renders last
		m_ImGuiLayer = new ImGuiLayer();
		PushOverlay(m_ImGuiLayer);	
	}
	Application::~Application() {
	}
	/// <summary>
	/// All window events are routed here and passed in as the parameter e
	/// </summary>
	/// <param name="e"></param>
	void Application::OnEvent(Event& e) {
		// Dispatch the event to the handler that matches its type
		EventDispatcher dispatcher(e);
		dispatcher.Dispatch<WindowCloseEvent>(BIND_EVENT_FN(Application::OnWindowClosed));
		dispatcher.Dispatch<WindowResizeEvent>(BIND_EVENT_FN(Application::OnWindowResize));
		// Log the event
		YT_CORE_INFO("Application:{0}", e);
		for (auto it = m_LayerStack.end(); it != m_LayerStack.begin();) {
			(*--it)->OnEvent(e);
			if (e.m_Handled)
				break;
		}
	}

	bool Application::OnWindowClosed(WindowCloseEvent& e) {
		m_Running = false;
		return true;
	}
	bool Application::OnWindowResize(WindowResizeEvent& e)
	{
		if (e.GetWidth()==0||e.GetHeight()==0) {
			m_Minimized = true;
			return false;
		}
		m_Minimized = false;
		// Resize the renderer viewport
		Renderer::OnWindowResize(e.GetWidth(), e.GetHeight());


		return false;
	}
	void Application::Run() {
		WindowResizeEvent e(1280, 720);
		if (e.IsInCategory(EventCategoryApplication)) {
			YT_CORE_TRACE(e);
		}
		if (e.IsInCategory(EventCategoryInput)) {
			YT_CORE_ERROR(e);
		}

		while (m_Running)
		{
			float time = (float)glfwGetTime(); // Windows/GLFW platform for now
			Timestep timestep = time - m_LastFrameTime;
			m_LastFrameTime = time;
			if (!m_Minimized) {

				for (Layer* layer : m_LayerStack) {
					layer->OnUpdate(timestep);
				}
			}	
			// ImGui rendering is driven by the Application, kept separate from layer updates
			m_ImGuiLayer->Begin();

			for (Layer* layer : m_LayerStack) {
				layer->OnImGuiRender();
			}
			m_ImGuiLayer->End();

			

			m_Window->OnUpdate();
		}
	}
	void Application::PushLayer(Layer* layer) {
		m_LayerStack.PushLayer(layer);
		layer->OnAttach();
	}
	void Application::PushOverlay(Layer* layer) {
		m_LayerStack.PushOverlay(layer);
		layer->OnAttach();
	}
}
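The resize path now runs: GLFW size callback -> WindowResizeEvent -> Application::OnWindowResize -> Renderer::OnWindowResize -> RenderCommand::SetViewport -> glViewport. Note that OnWindowResize returns false, so the event keeps propagating to the layers and the camera controller can recompute its aspect ratio; a 0x0 resize is treated as a minimize and only skips layer updates. A minimal sketch of that decision (hypothetical free function, not part of the engine):

#include <cstdint>

// Returns false in every case so the WindowResizeEvent also reaches the layers
// (the OrthographicCameraController needs it to update its aspect ratio).
static bool HandleWindowResize(uint32_t width, uint32_t height, bool& minimized)
{
	if (width == 0 || height == 0) {
		minimized = true; // minimized: skip layer updates, but do not consume the event
		return false;
	}
	minimized = false;
	// Renderer::OnWindowResize(width, height); // would forward the new size to glViewport
	return false;
}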

SandboxApp.cpp (OnEvent now reacts to WindowResize):

#include<YOTO.h>
#include "imgui/imgui.h"
#include<stdio.h>
#include <glm/gtc/matrix_transform.hpp>
#include <Platform/OpenGL/OpenGLShader.h>
#include <glm/gtc/type_ptr.hpp>


class ExampleLayer:public YOTO::Layer
{
public:
	ExampleLayer()
	:Layer("Example"),  m_CameraController(1280.0f/720.0f,true){
		uint32_t indices[3] = { 0,1,2 };
		float vertices[3 * 7] = {
			-0.5f,-0.5f,0.0f, 0.8f,0.2f,0.8f,1.0f,
			0.5f,-0.5f,0.0f,  0.2f,0.3f,0.8f,1.0f,
			0.0f,0.5f,0.0f,   0.8f,0.8f,0.2f,1.0f,
		};

		m_VertexArray.reset(YOTO::VertexArray::Create());


		YOTO::Ref<YOTO::VertexBuffer> m_VertexBuffer;
		m_VertexBuffer.reset(YOTO::VertexBuffer::Create(vertices, sizeof(vertices)));

		{
			YOTO::BufferLayout setlayout = {
				{YOTO::ShaderDataType::Float3, "a_Position"},
				{YOTO::ShaderDataType::Float4, "a_Color"}
			};
			m_VertexBuffer->SetLayout(setlayout);
		}

		m_VertexArray->AddVertexBuffer(m_VertexBuffer);


		YOTO::Ref<YOTO::IndexBuffer>m_IndexBuffer;
		m_IndexBuffer.reset(YOTO::IndexBuffer::Create(indices, sizeof(indices) / sizeof(uint32_t)));

		m_VertexArray->AddIndexBuffer(m_IndexBuffer);

		std::string vertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		layout(location = 1) in vec4 a_Color;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;
		out vec3 v_Position;
		out vec4 v_Color;
		void main(){
		v_Position=a_Position;
		v_Color=a_Color;
		gl_Position =u_ViewProjection *u_Transform* vec4( a_Position,1.0);
		}
		)";
		// Fragment shader: output the vertex color
		std::string fragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;
		in vec3 v_Position;
		in vec4 v_Color;
		void main(){
		color=vec4(v_Color);
		}
		)";
		m_Shader=(YOTO::Shader::Create("VertexPosColor", vertexSource, fragmentSource));

		/// Test ///

		m_SquareVA.reset(YOTO::VertexArray::Create());

		float squareVertices[5 * 4] = {
			-0.5f,-0.5f,0.0f, 0.0f,0.0f,
			0.5f,-0.5f,0.0f,  1.0f,0.0f,
			0.5f,0.5f,0.0f,   1.0f,1.0f,
			-0.5f,0.5f,0.0f,  0.0f,1.0f,
		};
		YOTO::Ref<YOTO::VertexBuffer> squareVB;
		squareVB.reset(YOTO::VertexBuffer::Create(squareVertices, sizeof(squareVertices)));
		squareVB->SetLayout({
			{YOTO::ShaderDataType::Float3, "a_Position"},
			{YOTO::ShaderDataType::Float2, "a_TexCoord"}
		});
		m_SquareVA->AddVertexBuffer(squareVB);
		uint32_t squareIndices[6] = { 0,1,2,2,3,0 };
		YOTO::Ref<YOTO::IndexBuffer> squareIB;

		squareIB.reset((YOTO::IndexBuffer::Create(squareIndices, sizeof(squareIndices) / sizeof(uint32_t))));

		m_SquareVA->AddIndexBuffer(squareIB);

		// Test:
		std::string BlueShaderVertexSource = R"(
		#version 330 core
		layout(location = 0) in vec3 a_Position;
		uniform mat4 u_ViewProjection;
		uniform mat4 u_Transform;

		out vec3 v_Position;
		void main(){
		v_Position=a_Position;
		gl_Position =u_ViewProjection*u_Transform*vec4( a_Position,1.0);
		}
		)";
		// Fragment shader: flat color
		std::string BlueShaderFragmentSource = R"(
		#version 330 core
		layout(location = 0) out vec4 color;

		in vec3 v_Position;
		uniform vec3 u_Color;
		void main(){
		color=vec4(u_Color,1.0);
		}
		)";
		m_BlueShader=(YOTO::Shader::Create("FlatColor", BlueShaderVertexSource, BlueShaderFragmentSource));

		auto textureShader = m_ShaderLibrary.Load("assets/shaders/Texture.glsl");
		m_Texture=YOTO::Texture2D::Create("assets/textures/Checkerboard.png");
		m_ChernoLogo= YOTO::Texture2D::Create("assets/textures/ChernoLogo.png");
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->Bind();
		std::dynamic_pointer_cast<YOTO::OpenGLShader>(textureShader)->UploadUniformInt("u_Texture", 0);
	}
	void OnImGuiRender() override {
		ImGui::Begin("Settings");
		ImGui::ColorEdit3("Square Color", glm::value_ptr(m_SquareColor));
		ImGui::End();
	}
	void OnUpdate(YOTO::Timestep ts)override {
		//update
		m_CameraController.OnUpdate(ts);
		//YT_CLIENT_TRACE("delta time {0}s ({1}ms)", ts.GetSeconds(), ts.GetMilliseconds());
	

	
		//Render
		YOTO::RenderCommand::SetClearColor({ 0.2f, 0.2f, 0.2f, 1.0f });
		YOTO::RenderCommand::Clear();

		YOTO::Renderer::BeginScene(m_CameraController.GetCamera());
		{
			static glm::mat4 scale = glm::scale(glm::mat4(1.0f), glm::vec3(0.1f)); 
			glm::vec4  redColor(0.8f, 0.3f, 0.3f, 1.0f);
			glm::vec4  blueColor(0.2f, 0.3f, 0.8f, 1.0f);

	/*		YOTO::MaterialRef material = new YOTO::MaterialRef(m_FlatColorShader);
			YOTO::MaterialInstaceRef mi = new YOTO::MaterialInstaceRef(material);
			mi.setValue("u_Color",redColor);
			mi.setTexture("u_AlbedoMap", texture);
			squreMesh->SetMaterial(mi);*/

			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->Bind();
			std::dynamic_pointer_cast<YOTO::OpenGLShader>(m_BlueShader)->UploadUniformFloat3("u_Color",m_SquareColor);
			for (int y = 0; y < 20; y++) {
				for (int x = 0; x <20; x++)
				{

					glm::vec3 pos(x * 0.105f,y* 0.105f, 0.0);
					glm::mat4 transform = glm::translate(glm::mat4(1.0f), pos) * scale;
				/*	if (x % 2 == 0) {
						m_BlueShader->UploadUniformFloat4("u_Color", redColor);
					}
					else {
						m_BlueShader->UploadUniformFloat4("u_Color", blueColor);
					}*/
		

					YOTO::Renderer::Submit(m_BlueShader, m_SquareVA, transform);
				}
			}
			
			auto textureShader = m_ShaderLibrary.Get("Texture");
			m_Texture->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));
			m_ChernoLogo->Bind();
			YOTO::Renderer::Submit(textureShader, m_SquareVA, glm::scale(glm::mat4(1.0f), glm::vec3(1.5f)));

			//YOTO::Renderer::Submit(m_Shader, m_VertexArray);


		
		}	
		YOTO::Renderer::EndScene();

		
		//YOTO::Renderer3D::BeginScene(m_Scene);
		//YOTO::Renderer2D::BeginScene(m_Scene);
	}
	void OnEvent(YOTO::Event& e)override {

		m_CameraController.OnEvent(e);
		if (e.GetEventType() == YOTO::EventType::WindowResize) {
			auto& re = (YOTO::WindowResizeEvent&)e;
	/*		float zoom = re.GetWidth() / 1280.0f;
			m_CameraController.SetZoomLevel(zoom);*/
		}




	}


private:
	YOTO::ShaderLibrary m_ShaderLibrary;
	YOTO::Ref<YOTO::Shader> m_Shader;
	YOTO::Ref<YOTO::VertexArray> m_VertexArray;


	YOTO::Ref<YOTO::Shader> m_BlueShader;
	YOTO::Ref<YOTO::VertexArray> m_SquareVA;
	
	YOTO::Ref<YOTO::Texture2D> m_Texture,m_ChernoLogo;

	YOTO::OrthographicCameraController m_CameraController;
	glm::vec3 m_SquareColor = { 0.2f,0.3f,0.7f };

};


class Sandbox:public YOTO::Application
{
public:
	Sandbox(){
		PushLayer(new ExampleLayer());
		//PushLayer(new YOTO::ImGuiLayer());
	}
	~Sandbox() {

	}

private:

};

YOTO::Application* YOTO::CreateApplication() {
	printf("helloworld");
	return new Sandbox();
}

This adds the new functionality: window resizing is now handled in OnEvent.
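One possible use of the new GetZoomLevel / SetZoomLevel accessors is the idea left commented out in ExampleLayer::OnEvent: tie the zoom level to the window width so the scene keeps roughly the same apparent size after a resize (treating 1280 px as zoom 1.0). A hedged sketch, not the tutorial's final behaviour, assuming the engine headers pulled in by <YOTO.h>:

#include <YOTO.h>

// Hypothetical helper: scale the zoom level with the window width (1280 px == zoom 1.0).
static void SyncZoomToWidth(YOTO::OrthographicCameraController& controller, const YOTO::WindowResizeEvent& e)
{
	controller.SetZoomLevel(e.GetWidth() / 1280.0f);
}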
