12. Textures
Here we will learn how to map textures onto our objects. This lesson modifies the Transformations lesson, so if you get lost here, you might want to backtrack to that lesson to see if you missed something. I will try my best to explain every new and modified line. One thing to mention: we have changed the code to define the box explicitly. Although this adds a little more code when creating the box and the vertex structure, it should make things easier when it comes to texture mapping.

In Direct3D, we use a 2D (u,v) coordinate system to map a texture onto an object. The u-axis runs horizontally along the image and the v-axis runs vertically, and both range from 0 to 1 (0 being the start of the image and 1 being the end). So half of the image's horizontal length is at u = 0.5, even if the image is actually 256 pixels wide.

+[http://www.braynzarsoft.net/image/100113][Texture Coordinates]

Now what would happen if we changed the value of u or v to something over one, say 2? You guessed it! The texture repeats (tiles), as shown below. (There is a small code sketch of this right after the vertex structure.)

+[http://www.braynzarsoft.net/image/100114][Texture Coordinates]

Here we globally declare a shader resource view for our texture, since textures themselves are not bound directly to the pipeline. Then we declare the effect variable through which the resource view will be sent to the effect file. We will explain these in more detail when we get to them.

ID3D10ShaderResourceView* DiffuseMapResourceView;
ID3D10EffectShaderResourceVariable* fxDiffuseMapVar;

Remember that a quad in Direct3D is made up of two triangles. For each triangle we want a texture mapped onto, we need to define a corresponding triangle in the texture. To do this, we modify our vertex structure to include a 2D vector which will hold our texture coordinates. We have also overloaded the vertex structure's constructor, which makes mapping the texture onto our cubes more explicit and easier. This lets us set each part of a vertex individually (for example, the x value of the position through "Vertex.pos.x") and, more importantly, lets us define a whole vertex by calling its constructor. If you do not understand this completely, it's OK, but you might want to look up constructor overloading to get a better idea of what's going on.

struct Vertex
{
////////////////////new//////////////////////////////////////////////////////////////////////
    Vertex(){}
    Vertex(float x, float y, float z, float u, float v) : pos(x,y,z), texCoord(u,v){}
////////////////////new//////////////////////////////////////////////////////////////////////
    D3DXVECTOR3 pos;
////////////////////new//////////////////////////////////////////////////////////////////////
    D3DXVECTOR2 texCoord;
////////////////////new//////////////////////////////////////////////////////////////////////
};
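Here is the promised sketch, not part of this lesson's code (the array name tiledQuad is made up for illustration). It uses the overloaded constructor to build a single quad whose texture coordinates run up to 2, so the texture would repeat twice in each direction, provided the sampler's address mode wraps coordinates outside the 0-1 range:

// Hypothetical quad built with the overloaded Vertex constructor.
// With u and v running from 0 to 2, the texture repeats twice in each
// direction (assuming the sampler wraps coordinates outside 0-1).
Vertex tiledQuad[4];
tiledQuad[0] = Vertex(-1.0f, -1.0f, 0.0f, 0.0f, 2.0f); // bottom-left
tiledQuad[1] = Vertex(-1.0f,  1.0f, 0.0f, 0.0f, 0.0f); // top-left
tiledQuad[2] = Vertex( 1.0f,  1.0f, 0.0f, 2.0f, 0.0f); // top-right
tiledQuad[3] = Vertex( 1.0f, -1.0f, 0.0f, 2.0f, 2.0f); // bottom-right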
Next up is our newly defined cube. As you can see, we have defined EVERY face of the cube. This way, we can define texture coordinates for each face.

Vertex v[24];

// Front Face
v[0] = Vertex(-1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
v[1] = Vertex(-1.0f,  1.0f, -1.0f, 0.0f, 0.0f);
v[2] = Vertex( 1.0f,  1.0f, -1.0f, 1.0f, 0.0f);
v[3] = Vertex( 1.0f, -1.0f, -1.0f, 1.0f, 1.0f);

// Back Face
v[4] = Vertex(-1.0f, -1.0f, 1.0f, 1.0f, 1.0f);
v[5] = Vertex( 1.0f, -1.0f, 1.0f, 0.0f, 1.0f);
v[6] = Vertex( 1.0f,  1.0f, 1.0f, 0.0f, 0.0f);
v[7] = Vertex(-1.0f,  1.0f, 1.0f, 1.0f, 0.0f);

// Top Face
v[8]  = Vertex(-1.0f, 1.0f, -1.0f, 0.0f, 1.0f);
v[9]  = Vertex(-1.0f, 1.0f,  1.0f, 0.0f, 0.0f);
v[10] = Vertex( 1.0f, 1.0f,  1.0f, 1.0f, 0.0f);
v[11] = Vertex( 1.0f, 1.0f, -1.0f, 1.0f, 1.0f);

// Bottom Face
v[12] = Vertex(-1.0f, -1.0f, -1.0f, 1.0f, 1.0f);
v[13] = Vertex( 1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
v[14] = Vertex( 1.0f, -1.0f,  1.0f, 0.0f, 0.0f);
v[15] = Vertex(-1.0f, -1.0f,  1.0f, 1.0f, 0.0f);

// Left Face
v[16] = Vertex(-1.0f, -1.0f,  1.0f, 0.0f, 1.0f);
v[17] = Vertex(-1.0f,  1.0f,  1.0f, 0.0f, 0.0f);
v[18] = Vertex(-1.0f,  1.0f, -1.0f, 1.0f, 0.0f);
v[19] = Vertex(-1.0f, -1.0f, -1.0f, 1.0f, 1.0f);

// Right Face
v[20] = Vertex( 1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
v[21] = Vertex( 1.0f,  1.0f, -1.0f, 0.0f, 0.0f);
v[22] = Vertex( 1.0f,  1.0f,  1.0f, 1.0f, 0.0f);
v[23] = Vertex( 1.0f, -1.0f,  1.0f, 1.0f, 1.0f);

D3D10_BUFFER_DESC vbd;
vbd.Usage = D3D10_USAGE_IMMUTABLE;
vbd.ByteWidth = sizeof(Vertex) * 24;
vbd.BindFlags = D3D10_BIND_VERTEX_BUFFER;
vbd.CPUAccessFlags = 0;
vbd.MiscFlags = 0;
D3D10_SUBRESOURCE_DATA vinitData;
vinitData.pSysMem = v;
d3dDevice->CreateBuffer(&vbd, &vinitData, &VertexBuffer);

DWORD i[36];

// Front Face
i[0] = 0; i[1] = 1; i[2] = 2;
i[3] = 0; i[4] = 2; i[5] = 3;

// Back Face
i[6] = 4; i[7] = 5; i[8] = 6;
i[9] = 4; i[10] = 6; i[11] = 7;

// Top Face
i[12] = 8; i[13] = 9; i[14] = 10;
i[15] = 8; i[16] = 10; i[17] = 11;

// Bottom Face
i[18] = 12; i[19] = 13; i[20] = 14;
i[21] = 12; i[22] = 14; i[23] = 15;

// Left Face
i[24] = 16; i[25] = 17; i[26] = 18;
i[27] = 16; i[28] = 18; i[29] = 19;

// Right Face
i[30] = 20; i[31] = 21; i[32] = 22;
i[33] = 20; i[34] = 22; i[35] = 23;

D3D10_BUFFER_DESC ibd;
ibd.Usage = D3D10_USAGE_IMMUTABLE;
ibd.ByteWidth = sizeof(DWORD) * 36;
ibd.BindFlags = D3D10_BIND_INDEX_BUFFER;
ibd.CPUAccessFlags = 0;
ibd.MiscFlags = 0;
D3D10_SUBRESOURCE_DATA iinitData;
iinitData.pSysMem = i;
d3dDevice->CreateBuffer(&ibd, &iinitData, &IndexBuffer);

Now we will modify the input element description for our vertex. Remember that in the Transformations lesson, the second element was the color. As you can see, it is now the texture coordinates. Notice how we used the format "DXGI_FORMAT_R32G32_FLOAT", which lets us store two float values for our texture coordinates.

D3D10_INPUT_ELEMENT_DESC layout[] =
{
    {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D10_INPUT_PER_VERTEX_DATA, 0 },
////////////////////new//////////////////////////////////////////////////////////////////////
    {"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D10_INPUT_PER_VERTEX_DATA, 0}
////////////////////new//////////////////////////////////////////////////////////////////////
};

Remember that texture resources are not bound directly to the rendering pipeline; a shader resource view to the texture is. So to get the image, we need to do two things: first load the image from the file using D3DX10CreateTextureFromFile, then create a shader resource view to that texture using ID3D10Device::CreateShaderResourceView and store it in an ID3D10ShaderResourceView object, which is what gets sent to the pipeline.
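For reference, a minimal sketch of that explicit two-step approach might look like the following (not used in this lesson, error checking omitted; note that D3DX10CreateTextureFromFile hands back an ID3D10Resource pointer, which is exactly what CreateShaderResourceView takes):

// Hypothetical two-step load. This lesson uses the one-step helper shown next instead.
ID3D10Resource* texResource = 0;
D3DX10CreateTextureFromFile(d3dDevice,       // device to create the texture with
                            L"braynzar.jpg", // image file to load
                            0,               // optional load info (file's dimensions, full mip chain)
                            0,               // optional thread pump (load on the main thread)
                            &texResource,    // the loaded texture resource
                            0);

// Create the shader resource view that will actually be bound to the pipeline.
// A NULL description creates a view of the whole resource in its own format.
d3dDevice->CreateShaderResourceView(texResource, 0, &DiffuseMapResourceView);

// The view holds its own reference to the texture, so release the local pointer.
texResource->Release();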
We can do both of these things in one step, using the following D3DX function:

HRESULT D3DX10CreateShaderResourceViewFromFile(
    ID3D10Device *pDevice,
    LPCTSTR pSrcFile,
    D3DX10_IMAGE_LOAD_INFO *pLoadInfo,
    ID3DX10ThreadPump *pPump,
    ID3D10ShaderResourceView **ppShaderResourceView,
    HRESULT *pHResult
);

pDevice - Pointer to the D3D device we will create the texture with (d3dDevice).
pSrcFile - The filename of the image you wish to load.
pLoadInfo - Optional information about how to load the image. By specifying NULL, the image dimensions in the file will be used as the texture dimensions, and a full mipmap chain will be created, which is what we usually want.
pPump - Also optional. It is used to load the resource in a separate thread. Specifying NULL loads the resource on the main thread.
ppShaderResourceView - Returns a pointer to the created shader resource view of the texture we loaded from the file.
pHResult - Specify NULL here if pPump was also NULL.

We will use the image "braynzar.jpg" in this application:

D3DX10CreateShaderResourceViewFromFile(d3dDevice, L"braynzar.jpg", 0, 0, &DiffuseMapResourceView, 0 );

This is where we obtain a pointer to our effect file's Texture2D object. We will go over the effect file for this lesson in a moment.

fxDiffuseMapVar = FX->GetVariableByName("DiffuseMap")->AsShaderResource();

The exciting part! After we have a pointer to the effect's Texture2D object, we use the line below to update it. It fills the effect file's Texture2D with the shader resource view we created earlier. We update the effect file's Texture2D every time we want a new texture. For example, if we want one cube to have a flower on it and the other cube to be a crate, we need to update the Texture2D object between the two draw calls. After we update the Texture2D object, we need to call "Apply". Example:

fxDiffuseMapVar->SetResource(flowerMapResourceView);
pass->Apply(0);
//Draw cube

fxDiffuseMapVar->SetResource(crateMapResourceView);
pass->Apply(0);
//Draw cube

In this lesson we only use the one texture:

fxDiffuseMapVar->SetResource(DiffuseMapResourceView);

That's it for the main app. Now we need to explain the effect file. The first new line you will see creates a variable to hold our texture so we can use it in the pixel shader:

Texture2D DiffuseMap;

Texture2D represents a texture in an effect file. Below it is another object associated with textures, called a SamplerState object. This is where we define the filter to use with our texture. Filters are of the D3D10_FILTER enumerated type. Here are some examples of different filters:

**D3D10_FILTER_MIN_MAG_MIP_POINT** - Use point sampling for minification, magnification, and mip-level sampling.
**D3D10_FILTER_MIN_MAG_POINT_MIP_LINEAR** - Use point sampling for minification and magnification; use linear interpolation for mip-level sampling.
**D3D10_FILTER_MIN_POINT_MAG_LINEAR_MIP_POINT** - Use point sampling for minification; use linear interpolation for magnification; use point sampling for mip-level sampling.
**D3D10_FILTER_MIN_POINT_MAG_MIP_LINEAR** - Use point sampling for minification; use linear interpolation for magnification and mip-level sampling.
**D3D10_FILTER_MIN_LINEAR_MAG_MIP_POINT** - Use linear interpolation for minification; use point sampling for magnification and mip-level sampling.
**D3D10_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR** - Use linear interpolation for minification; use point sampling for magnification; use linear interpolation for mip-level sampling.
**D3D10_FILTER_MIN_MAG_LINEAR_MIP_POINT** - Use linear interpolation for minification and magnification; use point sampling for mip-level sampling.
**D3D10_FILTER_MIN_MAG_MIP_LINEAR** - Use linear interpolation for minification, magnification, and mip-level sampling.
**D3D10_FILTER_ANISOTROPIC** - Use anisotropic interpolation for minification, magnification, and mip-level sampling.
**D3D10_FILTER_COMPARISON_MIN_MAG_MIP_POINT** - Use point sampling for minification, magnification, and mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_MAG_POINT_MIP_LINEAR** - Use point sampling for minification and magnification; use linear interpolation for mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_POINT_MAG_LINEAR_MIP_POINT** - Use point sampling for minification; use linear interpolation for magnification; use point sampling for mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_POINT_MAG_MIP_LINEAR** - Use point sampling for minification; use linear interpolation for magnification and mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_LINEAR_MAG_MIP_POINT** - Use linear interpolation for minification; use point sampling for magnification and mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_LINEAR_MAG_POINT_MIP_LINEAR** - Use linear interpolation for minification; use point sampling for magnification; use linear interpolation for mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_MAG_LINEAR_MIP_POINT** - Use linear interpolation for minification and magnification; use point sampling for mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR** - Use linear interpolation for minification, magnification, and mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_COMPARISON_ANISOTROPIC** - Use anisotropic interpolation for minification, magnification, and mip-level sampling. Compare the result to the comparison value.
**D3D10_FILTER_TEXT_1BIT** - For use in pixel shaders with textures that have the R1_UNORM format.

^^from msdn ;)

In this lesson we use trilinear filtering:

SamplerState TriLinearSample
{
    Filter = MIN_MAG_MIP_LINEAR;
};
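These filter names come from the D3D10_FILTER enumeration, so the same values also appear on the C++ side if you ever create a sampler state without the effects framework. This lesson does not do that (the sampler above lives in the effect file), but as a rough sketch of how the enumeration is used:

// Hypothetical C++ sampler, NOT part of this lesson's code.
D3D10_SAMPLER_DESC sd;
ZeroMemory(&sd, sizeof(sd));
sd.Filter         = D3D10_FILTER_MIN_MAG_MIP_LINEAR; // trilinear filtering
sd.AddressU       = D3D10_TEXTURE_ADDRESS_WRAP;      // wrap = repeat coordinates above 1
sd.AddressV       = D3D10_TEXTURE_ADDRESS_WRAP;
sd.AddressW       = D3D10_TEXTURE_ADDRESS_WRAP;
sd.MaxAnisotropy  = 1;
sd.ComparisonFunc = D3D10_COMPARISON_NEVER;
sd.MaxLOD         = D3D10_FLOAT32_MAX;               // allow the full mipmap chain

ID3D10SamplerState* samplerState = 0;
d3dDevice->CreateSamplerState(&sd, &samplerState);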
We have modified our VS_OUTPUT structure to include texture coordinates instead of a color:

float2 texCoord : TEXCOORD;

Next, you can see we have changed our vertex shader's parameters to also take texture coordinates instead of a color, since the texture will now determine the color of each pixel in the rendered geometry:

VS_OUTPUT VS(float4 inPos : POSITION, float2 inTexCoord : TEXCOORD)

In the vertex shader, we pass the texture coordinates on to the pixel shader, again instead of a color:

output.texCoord = inTexCoord;

We use the Texture2D::Sample method to sample a texture in an effect file. We pass a SamplerState object as the first parameter and the (u, v) texture coordinates as the second parameter. This method returns the interpolated color from the texture at the (u, v) texture coordinate, using the filtering method specified by the SamplerState object:

float4 diffuse = DiffuseMap.Sample( TriLinearSample, input.texCoord );

And last, we set the pixel's color to the corresponding texel in the texture we have stored:

return diffuse;

That's it for simply adding textures. Of course, there are much more complicated things we can do with textures, but this lesson was designed for simplicity. Hope it helps! Here's the final code:

main.cpp

#include <Windows.h>
#include <d3d10.h>
#include <d3dx10.h>
#include <string>

#pragma comment(lib, "D3D10.lib")
#pragma comment(lib, "d3dx10d.lib")

LPCTSTR WndClassName = L"firstwindow";
HWND hwnd = NULL;

const int Width = 800;
const int Height = 600;

bool InitializeWindow(HINSTANCE hInstance, int ShowWnd, int width, int height, bool windowed);

HRESULT hr;

ID3D10Device* d3dDevice;
IDXGISwapChain* SwapChain;
ID3D10RenderTargetView* RenderTargetView;
ID3D10Effect* FX;
ID3D10InputLayout* VertexLayout;
ID3D10Buffer* VertexBuffer;
ID3D10Buffer* IndexBuffer;
ID3D10EffectTechnique* Technique;
ID3D10DepthStencilView* DepthStencilView;
ID3D10Texture2D* DepthStencilBuffer;
////////////////////new//////////////////////////////////////////////////////////////////////
ID3D10ShaderResourceView* DiffuseMapResourceView;
ID3D10EffectShaderResourceVariable* fxDiffuseMapVar;
////////////////////new//////////////////////////////////////////////////////////////////////
ID3D10EffectMatrixVariable* fxWVPVar;

D3DXMATRIX WVP;
D3DXMATRIX World;
D3DXMATRIX View;
D3DXMATRIX Projection;

D3DXVECTOR3 Position;
D3DXVECTOR3 Target;
D3DXVECTOR3 Up;

D3DXMATRIX Rotation;
D3DXMATRIX Scale;
D3DXMATRIX Translation;
D3DXMATRIX Transformations;

float rot = 0.01f;

bool InitializeDirect3dApp(HINSTANCE hInstance);
bool InitScene();
void DrawScene();
bool ReleaseObjects();
int messageloop();

LRESULT CALLBACK WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam);

struct Vertex
{
////////////////////new//////////////////////////////////////////////////////////////////////
    Vertex(){}
    Vertex(float x, float y, float z, float u, float v) : pos(x,y,z), texCoord(u,v){}
////////////////////new//////////////////////////////////////////////////////////////////////
    D3DXVECTOR3 pos;
////////////////////new//////////////////////////////////////////////////////////////////////
    D3DXVECTOR2 texCoord;
////////////////////new//////////////////////////////////////////////////////////////////////
};

int WINAPI WinMain(HINSTANCE hInstance, //Main windows function
                   HINSTANCE hPrevInstance,
                   LPSTR lpCmdLine,
                   int nShowCmd)
{
    if(!InitializeWindow(hInstance, nShowCmd, Width, Height, true))
    {
        MessageBox(0, L"Window Initialization - Failed", L"Error", MB_OK);
        return 0;
    }

    if(!InitializeDirect3dApp(hInstance))
    {
        MessageBox(0, L"Direct3D Initialization - Failed", L"Error", MB_OK);
        return 0;
    }

    if(!InitScene())
    {
        MessageBox(0, L"Scene Initialization - Failed", L"Error", MB_OK);
        return 0;
    }

    messageloop();

    if(!ReleaseObjects())
    {
        MessageBox(0, L"Object Releasing - Failed", L"Error", MB_OK);
        return 0;
    }

    return 0;
}

bool InitializeWindow(HINSTANCE hInstance, int ShowWnd, int width, int height, bool windowed)
{
    typedef struct _WNDCLASS
    {
        UINT cbSize;
        UINT style;
        WNDPROC lpfnWndProc;
        int cbClsExtra;
        int cbWndExtra;
        HANDLE hInstance;
        HICON hIcon;
        HCURSOR hCursor;
        HBRUSH hbrBackground;
        LPCTSTR lpszMenuName;
        LPCTSTR lpszClassName;
    } WNDCLASS;

    WNDCLASSEX wc;

    wc.cbSize = sizeof(WNDCLASSEX);
    wc.style = CS_HREDRAW | CS_VREDRAW;
    wc.lpfnWndProc = WndProc;
    wc.cbClsExtra = NULL;
    wc.cbWndExtra = NULL;
    wc.hInstance = hInstance;
    wc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
    wc.hCursor = LoadCursor(NULL, IDC_ARROW);
    wc.hbrBackground = (HBRUSH)(COLOR_WINDOW + 2);
    wc.lpszMenuName = NULL;
    wc.lpszClassName = WndClassName;
    wc.hIconSm = LoadIcon(NULL, IDI_APPLICATION);

    if (!RegisterClassEx(&wc))
    {
        MessageBox(NULL, L"Error registering class", L"Error", MB_OK | MB_ICONERROR);
        return 1;
    }

    hwnd = CreateWindowEx(
        NULL,
        WndClassName,
        L"Window Title",
        WS_OVERLAPPEDWINDOW,
        CW_USEDEFAULT, CW_USEDEFAULT,
        width, height,
        NULL,
        NULL,
        hInstance,
        NULL
    );

    if (!hwnd)
    {
        MessageBox(NULL, L"Error creating window", L"Error", MB_OK | MB_ICONERROR);
        return 1;
    }

    ShowWindow(hwnd, ShowWnd);
    UpdateWindow(hwnd);

    return true;
}

bool InitializeDirect3dApp(HINSTANCE hInstance)
{
    UINT createDeviceFlags = 0;

    D3D10_DRIVER_TYPE driverTypes[] =
    {
        D3D10_DRIVER_TYPE_HARDWARE,
        D3D10_DRIVER_TYPE_REFERENCE,
    };
    UINT numDriverTypes = sizeof( driverTypes ) / sizeof( driverTypes[0] );

    DXGI_SWAP_CHAIN_DESC scd;
    scd.BufferDesc.Width = Width;
    scd.BufferDesc.Height = Height;
    scd.BufferDesc.RefreshRate.Numerator = 60;
    scd.BufferDesc.RefreshRate.Denominator = 1;
    scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    scd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
    scd.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;

    //no multisampling
    scd.SampleDesc.Count = 1;
    scd.SampleDesc.Quality = 0;

    scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
    scd.BufferCount = 1;
    scd.OutputWindow = hwnd;
    scd.Windowed = true;
    scd.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
    scd.Flags = 0;

    D3D10CreateDeviceAndSwapChain(0, D3D10_DRIVER_TYPE_HARDWARE, 0, 0, D3D10_SDK_VERSION, &scd, &SwapChain, &d3dDevice);

    ID3D10Texture2D* backBuffer;
    SwapChain->GetBuffer(0, _uuidof(ID3D10Texture2D), reinterpret_cast<void**>(&backBuffer));
    d3dDevice->CreateRenderTargetView(backBuffer, 0, &RenderTargetView);
    backBuffer->Release();

    D3D10_TEXTURE2D_DESC depthStencilDesc;
    depthStencilDesc.Width = Width;
    depthStencilDesc.Height = Height;
    depthStencilDesc.MipLevels = 1;
    depthStencilDesc.ArraySize = 1;
    depthStencilDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
    depthStencilDesc.SampleDesc.Count = 1;
    depthStencilDesc.SampleDesc.Quality = 0;
    depthStencilDesc.Usage = D3D10_USAGE_DEFAULT;
    depthStencilDesc.BindFlags = D3D10_BIND_DEPTH_STENCIL;
    depthStencilDesc.CPUAccessFlags = 0;
    depthStencilDesc.MiscFlags = 0;

    d3dDevice->CreateTexture2D(&depthStencilDesc, NULL, &DepthStencilBuffer);
    d3dDevice->CreateDepthStencilView(DepthStencilBuffer, NULL, &DepthStencilView);

    d3dDevice->OMSetRenderTargets(1, &RenderTargetView, DepthStencilView);

    // Setup the viewport
    D3D10_VIEWPORT vp;
    vp.Width = Width;
    vp.Height = Height;
    vp.MinDepth = 0.0f;
    vp.MaxDepth = 1.0f;
    vp.TopLeftX = 0;
    vp.TopLeftY = 0;
    d3dDevice->RSSetViewports( 1, &vp );

    D3DXMatrixIdentity( &World );

    Position = D3DXVECTOR3( 0.0f, 6.0f, -14.0f );
    Target = D3DXVECTOR3( 0.0f, 0.0f, 0.0f );
    Up = D3DXVECTOR3( 0.0f, 1.0f, 0.0f );

    D3DXMatrixLookAtLH( &View, &Position, &Target, &Up );

    return true;
}

bool InitScene()
{
////////////////////new//////////////////////////////////////////////////////////////////////
    Vertex v[24];

    // Front Face
    v[0] = Vertex(-1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
    v[1] = Vertex(-1.0f,  1.0f, -1.0f, 0.0f, 0.0f);
    v[2] = Vertex( 1.0f,  1.0f, -1.0f, 1.0f, 0.0f);
    v[3] = Vertex( 1.0f, -1.0f, -1.0f, 1.0f, 1.0f);

    // Back Face
    v[4] = Vertex(-1.0f, -1.0f, 1.0f, 1.0f, 1.0f);
    v[5] = Vertex( 1.0f, -1.0f, 1.0f, 0.0f, 1.0f);
    v[6] = Vertex( 1.0f,  1.0f, 1.0f, 0.0f, 0.0f);
    v[7] = Vertex(-1.0f,  1.0f, 1.0f, 1.0f, 0.0f);

    // Top Face
    v[8]  = Vertex(-1.0f, 1.0f, -1.0f, 0.0f, 1.0f);
    v[9]  = Vertex(-1.0f, 1.0f,  1.0f, 0.0f, 0.0f);
    v[10] = Vertex( 1.0f, 1.0f,  1.0f, 1.0f, 0.0f);
    v[11] = Vertex( 1.0f, 1.0f, -1.0f, 1.0f, 1.0f);

    // Bottom Face
    v[12] = Vertex(-1.0f, -1.0f, -1.0f, 1.0f, 1.0f);
    v[13] = Vertex( 1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
    v[14] = Vertex( 1.0f, -1.0f,  1.0f, 0.0f, 0.0f);
    v[15] = Vertex(-1.0f, -1.0f,  1.0f, 1.0f, 0.0f);

    // Left Face
    v[16] = Vertex(-1.0f, -1.0f,  1.0f, 0.0f, 1.0f);
    v[17] = Vertex(-1.0f,  1.0f,  1.0f, 0.0f, 0.0f);
    v[18] = Vertex(-1.0f,  1.0f, -1.0f, 1.0f, 0.0f);
    v[19] = Vertex(-1.0f, -1.0f, -1.0f, 1.0f, 1.0f);

    // Right Face
    v[20] = Vertex( 1.0f, -1.0f, -1.0f, 0.0f, 1.0f);
    v[21] = Vertex( 1.0f,  1.0f, -1.0f, 0.0f, 0.0f);
    v[22] = Vertex( 1.0f,  1.0f,  1.0f, 1.0f, 0.0f);
    v[23] = Vertex( 1.0f, -1.0f,  1.0f, 1.0f, 1.0f);

    D3D10_BUFFER_DESC vbd;
    vbd.Usage = D3D10_USAGE_IMMUTABLE;
    vbd.ByteWidth = sizeof(Vertex) * 24;
    vbd.BindFlags = D3D10_BIND_VERTEX_BUFFER;
    vbd.CPUAccessFlags = 0;
    vbd.MiscFlags = 0;
    D3D10_SUBRESOURCE_DATA vinitData;
    vinitData.pSysMem = v;
    d3dDevice->CreateBuffer(&vbd, &vinitData, &VertexBuffer);

    DWORD i[36];

    // Front Face
    i[0] = 0; i[1] = 1; i[2] = 2;
    i[3] = 0; i[4] = 2; i[5] = 3;

    // Back Face
    i[6] = 4; i[7] = 5; i[8] = 6;
    i[9] = 4; i[10] = 6; i[11] = 7;

    // Top Face
    i[12] = 8; i[13] = 9; i[14] = 10;
    i[15] = 8; i[16] = 10; i[17] = 11;

    // Bottom Face
    i[18] = 12; i[19] = 13; i[20] = 14;
    i[21] = 12; i[22] = 14; i[23] = 15;

    // Left Face
    i[24] = 16; i[25] = 17; i[26] = 18;
    i[27] = 16; i[28] = 18; i[29] = 19;

    // Right Face
    i[30] = 20; i[31] = 21; i[32] = 22;
    i[33] = 20; i[34] = 22; i[35] = 23;

    D3D10_BUFFER_DESC ibd;
    ibd.Usage = D3D10_USAGE_IMMUTABLE;
    ibd.ByteWidth = sizeof(DWORD) * 36;
    ibd.BindFlags = D3D10_BIND_INDEX_BUFFER;
    ibd.CPUAccessFlags = 0;
    ibd.MiscFlags = 0;
    D3D10_SUBRESOURCE_DATA iinitData;
    iinitData.pSysMem = i;
    d3dDevice->CreateBuffer(&ibd, &iinitData, &IndexBuffer);
////////////////////new//////////////////////////////////////////////////////////////////////

    UINT stride = sizeof( Vertex );
    UINT offset = 0;
    d3dDevice->IASetVertexBuffers( 0, 1, &VertexBuffer, &stride, &offset );
    d3dDevice->IASetIndexBuffer(IndexBuffer, DXGI_FORMAT_R32_UINT, 0);

    D3D10_INPUT_ELEMENT_DESC layout[] =
    {
        {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D10_INPUT_PER_VERTEX_DATA, 0 },
////////////////////new//////////////////////////////////////////////////////////////////////
        {"TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D10_INPUT_PER_VERTEX_DATA, 0}
////////////////////new//////////////////////////////////////////////////////////////////////
    };

    ID3D10Blob* compilationErrors = 0;
    HRESULT hr = 0;
    hr = D3DX10CreateEffectFromFile( L"vertex.fx", NULL, NULL, "fx_4_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, d3dDevice, NULL, NULL, &FX, &compilationErrors, NULL );
    if(FAILED(hr))
    {
        MessageBoxA(0, (char*)compilationErrors->GetBufferPointer(), 0, 0);
        compilationErrors->Release();
        return false;
    }

////////////////////new//////////////////////////////////////////////////////////////////////
    D3DX10CreateShaderResourceViewFromFile(d3dDevice, L"braynzar.jpg", 0, 0, &DiffuseMapResourceView, 0 );
////////////////////new//////////////////////////////////////////////////////////////////////

    Technique = FX->GetTechniqueByName( "Tech" );
    fxWVPVar = FX->GetVariableByName("WVP")->AsMatrix();
////////////////////new//////////////////////////////////////////////////////////////////////
    fxDiffuseMapVar = FX->GetVariableByName("DiffuseMap")->AsShaderResource();
////////////////////new//////////////////////////////////////////////////////////////////////

    D3D10_PASS_DESC PassDesc;
    Technique->GetPassByIndex( 0 )->GetDesc( &PassDesc );
    d3dDevice->CreateInputLayout( layout, 2, PassDesc.pIAInputSignature, PassDesc.IAInputSignatureSize, &VertexLayout );

    d3dDevice->IASetInputLayout( VertexLayout );
    d3dDevice->IASetPrimitiveTopology( D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST );

    return true;
}

bool ReleaseObjects()
{
    if( d3dDevice ) d3dDevice->ClearState();

    if( VertexBuffer ) VertexBuffer->Release();
    if( IndexBuffer ) IndexBuffer->Release();
    if( VertexLayout ) VertexLayout->Release();
    if( FX ) FX->Release();
    if( RenderTargetView ) RenderTargetView->Release();
    if( SwapChain ) SwapChain->Release();
    if( d3dDevice ) d3dDevice->Release();
////////////////////new//////////////////////////////////////////////////////////////////////
    if( DiffuseMapResourceView ) DiffuseMapResourceView->Release();
////////////////////new//////////////////////////////////////////////////////////////////////

    return true;
}

void DrawScene()
{
    //Draw Scene Here
    D3DXCOLOR bgColor( 0.0f, 0.0f, 0.0f, 1.0f);
    d3dDevice->ClearRenderTargetView( RenderTargetView, bgColor );
    d3dDevice->ClearDepthStencilView(DepthStencilView, D3D10_CLEAR_DEPTH|D3D10_CLEAR_STENCIL, 1.0f, 0);

    D3DXMatrixPerspectiveFovLH(&Projection, 0.4f*3.14f, (float)Width/Height, 1.0f, 1000.0f);

    D3DXVECTOR3 rotaxis(0.0f, 1.0f, 0.0f);
    D3DXMatrixRotationAxis(&Rotation, &rotaxis, rot);
    D3DXMatrixTranslation( &Translation, 0.0f, 0.0f, 4.0f );

    Transformations = Translation * Rotation;

    rot += .0005f;

    WVP = World * Transformations * View * Projection;
    fxWVPVar->SetMatrix((float*)&WVP);
////////////////////new//////////////////////////////////////////////////////////////////////
    fxDiffuseMapVar->SetResource(DiffuseMapResourceView);
////////////////////new//////////////////////////////////////////////////////////////////////

    D3D10_TECHNIQUE_DESC techDesc;
    Technique->GetDesc( &techDesc );

    //draw first cube
    for( UINT p = 0; p < techDesc.Passes; ++p )
    {
        Technique->GetPassByIndex( p )->Apply( 0 );
        d3dDevice->DrawIndexed(36, 0, 0);
    }

    D3DXMatrixRotationAxis(&Rotation, &rotaxis, -rot);
    D3DXMatrixScaling( &Scale, 1.3f, 1.3f, 1.3f );

    Transformations = Rotation * Scale;

    WVP = World * Transformations * View * Projection;
    fxWVPVar->SetMatrix((float*)&WVP);

    //draw second cube
    for( UINT p = 0; p < techDesc.Passes; ++p )
    {
        Technique->GetPassByIndex( p )->Apply( 0 );
        d3dDevice->DrawIndexed(36, 0, 0);
    }

    SwapChain->Present( 0, 0 );
}

int messageloop(){
    MSG msg;
    ZeroMemory(&msg, sizeof(MSG));

    while(true)
    {
        BOOL PeekMessageL( LPMSG lpMsg, HWND hWnd, UINT wMsgFilterMin, UINT wMsgFilterMax, UINT wRemoveMsg );

        if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
        {
            if (msg.message == WM_QUIT)
                break;
            TranslateMessage(&msg);
            DispatchMessage(&msg);
        }
        else{
            // run game code
            DrawScene();
        }
    }

    return msg.wParam;
}

LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
    switch( msg )
    {
    case WM_KEYDOWN:
        if( wParam == VK_ESCAPE ){
            if(MessageBox(0, L"Are you sure you want to exit?", L"Really?", MB_YESNO | MB_ICONQUESTION) == IDYES)
                DestroyWindow(hwnd);
        }
        return 0;

    case WM_DESTROY:
        PostQuitMessage(0);
        return 0;
    }

    return DefWindowProc(hwnd, msg, wParam, lParam);
}

vertex.fx

cbuffer cbPerObject
{
    float4x4 WVP;
};

Texture2D DiffuseMap;

SamplerState TriLinearSample
{
    Filter = MIN_MAG_MIP_LINEAR;
};

struct VS_OUTPUT //output structure for vertex shader
{
    float4 Pos : SV_POSITION;
    float2 texCoord : TEXCOORD;
};

// Vertex Shader
VS_OUTPUT VS(float4 inPos : POSITION, float2 inTexCoord : TEXCOORD)
{
    VS_OUTPUT output = (VS_OUTPUT)0;

    output.Pos = mul(inPos, WVP);
    output.texCoord = inTexCoord;

    return output; //send position and texture coordinates to pixel shader
}

// Pixel Shader
float4 PS(VS_OUTPUT input) : SV_Target
{
    float4 diffuse = DiffuseMap.Sample( TriLinearSample, input.texCoord );

    return diffuse; // Set the color of the pixel to the corresponding texel in the loaded image
}

technique10 Tech
{
    pass P0
    {
        SetVertexShader( CompileShader( vs_4_0, VS() ) );
        SetPixelShader( CompileShader( ps_4_0, PS() ) );
    }
}
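As mentioned earlier, giving each cube its own texture only takes a second shader resource view and a SetResource/Apply call between the two draw calls. A rough sketch of how the drawing part of DrawScene could change, assuming a second, hypothetical crateMapResourceView has been loaded the same way as DiffuseMapResourceView:

// Sketch only, not part of this lesson's code.
// crateMapResourceView is assumed to be a second global view, loaded e.g. with
// D3DX10CreateShaderResourceViewFromFile(d3dDevice, L"crate.jpg", 0, 0, &crateMapResourceView, 0);

// First cube keeps the original texture.
fxDiffuseMapVar->SetResource(DiffuseMapResourceView);
for( UINT p = 0; p < techDesc.Passes; ++p )
{
    Technique->GetPassByIndex( p )->Apply( 0 );
    d3dDevice->DrawIndexed(36, 0, 0);
}

// Second cube: update the effect's Texture2D, re-apply the pass, then draw.
fxDiffuseMapVar->SetResource(crateMapResourceView);
for( UINT p = 0; p < techDesc.Passes; ++p )
{
    Technique->GetPassByIndex( p )->Apply( 0 );
    d3dDevice->DrawIndexed(36, 0, 0);
}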