triangle-vulkan.c

/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Chia-I Wu <olvaffe@gmail.com>
 * Author: Cody Northrop <cody@lunarg.com>
 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
 * Author: Ian Elliott <ian@LunarG.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Piers Daniell <pdaniell@nvidia.com>
 * Author: Gwan-gyeong Mun <elongbug@gmail.com>
 * Porter: Camilla Löwy <elmindreda@glfw.org>
 */
/*
 * Draw a textured triangle with depth testing. This is written against Intel
 * ICD. It does not do state transition nor object memory binding like it
 * should. It also does no error checking.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <assert.h>
#include <signal.h>

#ifdef _WIN32
#include <windows.h>
#endif

#include <glad/vulkan.h>
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>

#define DEMO_TEXTURE_COUNT 1
#define VERTEX_BUFFER_BIND_ID 0
#define APP_SHORT_NAME "tri"
#define APP_LONG_NAME "The Vulkan Triangle Demo Program"

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))

#if defined(NDEBUG) && defined(__GNUC__)
#define U_ASSERT_ONLY __attribute__((unused))
#else
#define U_ASSERT_ONLY
#endif

#define ERR_EXIT(err_msg, err_class) \
    do { \
        printf(err_msg); \
        fflush(stdout); \
        exit(1); \
    } while (0)

static GLADapiproc glad_vulkan_callback(const char* name, void* user)
{
    return glfwGetInstanceProcAddress((VkInstance) user, name);
}
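
/* Precompiled SPIR-V words for the demo's fragment and vertex shaders,
 * embedded directly in the source so no shader files are read at run time. */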
static const uint32_t fragShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000014,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000011,0x00030010,
    0x00000004,0x00000007,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,
    0x72617065,0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,
    0x735f4252,0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,
    0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x61724675,0x6c6f4367,0x0000726f,
    0x00030005,0x0000000d,0x00786574,0x00050005,0x00000011,0x63786574,0x64726f6f,0x00000000,
    0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000022,0x00000000,
    0x00040047,0x0000000d,0x00000021,0x00000000,0x00040047,0x00000011,0x0000001e,0x00000000,
    0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
    0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,0x00000007,
    0x0004003b,0x00000008,0x00000009,0x00000003,0x00090019,0x0000000a,0x00000006,0x00000001,
    0x00000000,0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x0000000b,0x0000000a,
    0x00040020,0x0000000c,0x00000000,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000000,
    0x00040017,0x0000000f,0x00000006,0x00000002,0x00040020,0x00000010,0x00000001,0x0000000f,
    0x0004003b,0x00000010,0x00000011,0x00000001,0x00050036,0x00000002,0x00000004,0x00000000,
    0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000b,0x0000000e,0x0000000d,0x0004003d,
    0x0000000f,0x00000012,0x00000011,0x00050057,0x00000007,0x00000013,0x0000000e,0x00000012,
    0x0003003e,0x00000009,0x00000013,0x000100fd,0x00010038
};
static const uint32_t vertShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000018,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0009000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000009,0x0000000b,0x00000010,
    0x00000014,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,0x72617065,
    0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,0x735f4252,
    0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,0x00000004,
    0x6e69616d,0x00000000,0x00050005,0x00000009,0x63786574,0x64726f6f,0x00000000,0x00040005,
    0x0000000b,0x72747461,0x00000000,0x00060005,0x0000000e,0x505f6c67,0x65567265,0x78657472,
    0x00000000,0x00060006,0x0000000e,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00030005,
    0x00000010,0x00000000,0x00030005,0x00000014,0x00736f70,0x00040047,0x00000009,0x0000001e,
    0x00000000,0x00040047,0x0000000b,0x0000001e,0x00000001,0x00050048,0x0000000e,0x00000000,
    0x0000000b,0x00000000,0x00030047,0x0000000e,0x00000002,0x00040047,0x00000014,0x0000001e,
    0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,
    0x00000020,0x00040017,0x00000007,0x00000006,0x00000002,0x00040020,0x00000008,0x00000003,
    0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,0x00040020,0x0000000a,0x00000001,
    0x00000007,0x0004003b,0x0000000a,0x0000000b,0x00000001,0x00040017,0x0000000d,0x00000006,
    0x00000004,0x0003001e,0x0000000e,0x0000000d,0x00040020,0x0000000f,0x00000003,0x0000000e,
    0x0004003b,0x0000000f,0x00000010,0x00000003,0x00040015,0x00000011,0x00000020,0x00000001,
    0x0004002b,0x00000011,0x00000012,0x00000000,0x00040020,0x00000013,0x00000001,0x0000000d,
    0x0004003b,0x00000013,0x00000014,0x00000001,0x00040020,0x00000016,0x00000003,0x0000000d,
    0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,
    0x00000007,0x0000000c,0x0000000b,0x0003003e,0x00000009,0x0000000c,0x0004003d,0x0000000d,
    0x00000015,0x00000014,0x00050041,0x00000016,0x00000017,0x00000010,0x00000012,0x0003003e,
    0x00000017,0x00000015,0x000100fd,0x00010038
};
struct texture_object {
    VkSampler sampler;
    VkImage image;
    VkImageLayout imageLayout;
    VkDeviceMemory mem;
    VkImageView view;
    int32_t tex_width, tex_height;
};

static int validation_error = 0;

VKAPI_ATTR VkBool32 VKAPI_CALL
BreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
              uint64_t srcObject, size_t location, int32_t msgCode,
              const char *pLayerPrefix, const char *pMsg,
              void *pUserData) {
#ifdef _WIN32
    DebugBreak();
#else
    raise(SIGTRAP);
#endif

    return false;
}

typedef struct {
    VkImage image;
    VkCommandBuffer cmd;
    VkImageView view;
} SwapchainBuffers;
struct demo {
    GLFWwindow* window;
    VkSurfaceKHR surface;
    bool use_staging_buffer;

    VkInstance inst;
    VkPhysicalDevice gpu;
    VkDevice device;
    VkQueue queue;
    VkPhysicalDeviceProperties gpu_props;
    VkPhysicalDeviceFeatures gpu_features;
    VkQueueFamilyProperties *queue_props;
    uint32_t graphics_queue_node_index;

    uint32_t enabled_extension_count;
    uint32_t enabled_layer_count;
    const char *extension_names[64];
    const char *enabled_layers[64];

    int width, height;
    VkFormat format;
    VkColorSpaceKHR color_space;

    uint32_t swapchainImageCount;
    VkSwapchainKHR swapchain;
    SwapchainBuffers *buffers;

    VkCommandPool cmd_pool;

    struct {
        VkFormat format;
        VkImage image;
        VkDeviceMemory mem;
        VkImageView view;
    } depth;

    struct texture_object textures[DEMO_TEXTURE_COUNT];

    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkPipelineVertexInputStateCreateInfo vi;
        VkVertexInputBindingDescription vi_bindings[1];
        VkVertexInputAttributeDescription vi_attrs[2];
    } vertices;

    VkCommandBuffer setup_cmd; // Command Buffer for initialization commands
    VkCommandBuffer draw_cmd;  // Command Buffer for drawing commands
    VkPipelineLayout pipeline_layout;
    VkDescriptorSetLayout desc_layout;
    VkPipelineCache pipelineCache;
    VkRenderPass render_pass;
    VkPipeline pipeline;

    VkShaderModule vert_shader_module;
    VkShaderModule frag_shader_module;

    VkDescriptorPool desc_pool;
    VkDescriptorSet desc_set;

    VkFramebuffer *framebuffers;

    VkPhysicalDeviceMemoryProperties memory_properties;

    int32_t curFrame;
    int32_t frameCount;
    bool validate;
    bool use_break;
    VkDebugReportCallbackEXT msg_callback;

    float depthStencil;
    float depthIncrement;

    uint32_t current_buffer;
    uint32_t queue_count;
};
VKAPI_ATTR VkBool32 VKAPI_CALL
dbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
        uint64_t srcObject, size_t location, int32_t msgCode,
        const char *pLayerPrefix, const char *pMsg, void *pUserData) {
    char *message = (char *)malloc(strlen(pMsg) + 100);

    assert(message);

    validation_error = 1;

    if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
        sprintf(message, "ERROR: [%s] Code %d : %s", pLayerPrefix, msgCode,
                pMsg);
    } else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
        sprintf(message, "WARNING: [%s] Code %d : %s", pLayerPrefix, msgCode,
                pMsg);
    } else {
        return false;
    }

    printf("%s\n", message);
    fflush(stdout);
    free(message);

    /*
     * false indicates that layer should not bail-out of an
     * API call that had validation failures. This may mean that the
     * app dies inside the driver due to invalid parameter(s).
     * That's what would happen without validation layers, so we'll
     * keep that behavior here.
     */
    return false;
}
// Forward declaration:
static void demo_resize(struct demo *demo);

static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits,
                                        VkFlags requirements_mask,
                                        uint32_t *typeIndex) {
    uint32_t i;

    // Search memtypes to find first index with those properties
    for (i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
        if ((typeBits & 1) == 1) {
            // Type is available, does it match user properties?
            if ((demo->memory_properties.memoryTypes[i].propertyFlags &
                 requirements_mask) == requirements_mask) {
                *typeIndex = i;
                return true;
            }
        }
        typeBits >>= 1;
    }
    // No memory types matched, return failure
    return false;
}
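
/* Ends the shared setup command buffer (if one was recorded), submits it,
 * waits for the queue to go idle, then frees the buffer so all initialization
 * commands are fully executed before the demo continues. */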
static void demo_flush_init_cmd(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;

    if (demo->setup_cmd == VK_NULL_HANDLE)
        return;

    err = vkEndCommandBuffer(demo->setup_cmd);
    assert(!err);

    const VkCommandBuffer cmd_bufs[] = {demo->setup_cmd};
    VkFence nullFence = {VK_NULL_HANDLE};
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 0,
                                .pWaitSemaphores = NULL,
                                .pWaitDstStageMask = NULL,
                                .commandBufferCount = 1,
                                .pCommandBuffers = cmd_bufs,
                                .signalSemaphoreCount = 0,
                                .pSignalSemaphores = NULL};

    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);

    err = vkQueueWaitIdle(demo->queue);
    assert(!err);

    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
    demo->setup_cmd = VK_NULL_HANDLE;
}
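
/* Records an image layout transition into the shared setup command buffer,
 * allocating and beginning that buffer on first use. The destination access
 * mask of the barrier is derived from the new layout. */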
static void demo_set_image_layout(struct demo *demo, VkImage image,
                                  VkImageAspectFlags aspectMask,
                                  VkImageLayout old_image_layout,
                                  VkImageLayout new_image_layout,
                                  VkAccessFlagBits srcAccessMask) {
    VkResult U_ASSERT_ONLY err;

    if (demo->setup_cmd == VK_NULL_HANDLE) {
        const VkCommandBufferAllocateInfo cmd = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .pNext = NULL,
            .commandPool = demo->cmd_pool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1,
        };

        err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->setup_cmd);
        assert(!err);

        VkCommandBufferBeginInfo cmd_buf_info = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
            .pNext = NULL,
            .flags = 0,
            .pInheritanceInfo = NULL,
        };
        err = vkBeginCommandBuffer(demo->setup_cmd, &cmd_buf_info);
        assert(!err);
    }

    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = srcAccessMask,
        .dstAccessMask = 0,
        .oldLayout = old_image_layout,
        .newLayout = new_image_layout,
        .image = image,
        .subresourceRange = {aspectMask, 0, 1, 0, 1}};

    if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
        /* Make sure anything that was copying from this image has completed */
        image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    }

    if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    }

    if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    }

    if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
        /* Make sure any Copy or CPU writes to image are flushed */
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
    }

    VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;

    VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
    VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

    vkCmdPipelineBarrier(demo->setup_cmd, src_stages, dest_stages, 0, 0, NULL,
                         0, NULL, 1, pmemory_barrier);
}
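
/* Re-records the draw command buffer for the current swapchain image:
 * transition the image to COLOR_ATTACHMENT_OPTIMAL, run the render pass
 * (bind pipeline, descriptor set, dynamic viewport/scissor and vertex buffer,
 * draw three vertices), then transition to PRESENT_SRC_KHR for presentation. */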
static void demo_draw_build_cmd(struct demo *demo) {
    const VkCommandBufferBeginInfo cmd_buf_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = 0,
        .pInheritanceInfo = NULL,
    };
    const VkClearValue clear_values[2] = {
        [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
        [1] = {.depthStencil = {demo->depthStencil, 0}},
    };
    const VkRenderPassBeginInfo rp_begin = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .framebuffer = demo->framebuffers[demo->current_buffer],
        .renderArea.offset.x = 0,
        .renderArea.offset.y = 0,
        .renderArea.extent.width = demo->width,
        .renderArea.extent.height = demo->height,
        .clearValueCount = 2,
        .pClearValues = clear_values,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkBeginCommandBuffer(demo->draw_cmd, &cmd_buf_info);
    assert(!err);

    // We can use LAYOUT_UNDEFINED as a wildcard here because we don't care what
    // happens to the previous contents of the image
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = 0,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = demo->buffers[demo->current_buffer].image,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};

    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, &image_memory_barrier);
    vkCmdBeginRenderPass(demo->draw_cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                      demo->pipeline);
    vkCmdBindDescriptorSets(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                            demo->pipeline_layout, 0, 1, &demo->desc_set, 0,
                            NULL);

    VkViewport viewport;
    memset(&viewport, 0, sizeof(viewport));
    viewport.height = (float)demo->height;
    viewport.width = (float)demo->width;
    viewport.minDepth = (float)0.0f;
    viewport.maxDepth = (float)1.0f;
    vkCmdSetViewport(demo->draw_cmd, 0, 1, &viewport);

    VkRect2D scissor;
    memset(&scissor, 0, sizeof(scissor));
    scissor.extent.width = demo->width;
    scissor.extent.height = demo->height;
    scissor.offset.x = 0;
    scissor.offset.y = 0;
    vkCmdSetScissor(demo->draw_cmd, 0, 1, &scissor);

    VkDeviceSize offsets[1] = {0};
    vkCmdBindVertexBuffers(demo->draw_cmd, VERTEX_BUFFER_BIND_ID, 1,
                           &demo->vertices.buf, offsets);

    vkCmdDraw(demo->draw_cmd, 3, 1, 0, 0);
    vkCmdEndRenderPass(demo->draw_cmd);

    VkImageMemoryBarrier prePresentBarrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};

    prePresentBarrier.image = demo->buffers[demo->current_buffer].image;
    VkImageMemoryBarrier *pmemory_barrier = &prePresentBarrier;
    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, pmemory_barrier);

    err = vkEndCommandBuffer(demo->draw_cmd);
    assert(!err);
}
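
/* Renders and presents one frame: acquire the next swapchain image, flush any
 * pending setup commands, rebuild and submit the draw command buffer, then
 * queue the image for presentation. VK_ERROR_OUT_OF_DATE_KHR triggers a
 * swapchain rebuild via demo_resize(). */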
static void demo_draw(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    VkSemaphore imageAcquiredSemaphore, drawCompleteSemaphore;
    VkSemaphoreCreateInfo semaphoreCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = NULL,
        .flags = 0,
    };

    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &imageAcquiredSemaphore);
    assert(!err);

    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &drawCompleteSemaphore);
    assert(!err);

    // Get the index of the next available swapchain image:
    err = vkAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
                                imageAcquiredSemaphore,
                                (VkFence)0, // TODO: Show use of fence
                                &demo->current_buffer);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
        demo_draw(demo);
        vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
        vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
        return;
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    demo_flush_init_cmd(demo);

    // Wait for the present complete semaphore to be signaled to ensure
    // that the image won't be rendered to until the presentation
    // engine has fully released ownership to the application, and it is
    // okay to render to the image.

    demo_draw_build_cmd(demo);
    VkFence nullFence = VK_NULL_HANDLE;
    VkPipelineStageFlags pipe_stage_flags =
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 1,
                                .pWaitSemaphores = &imageAcquiredSemaphore,
                                .pWaitDstStageMask = &pipe_stage_flags,
                                .commandBufferCount = 1,
                                .pCommandBuffers = &demo->draw_cmd,
                                .signalSemaphoreCount = 1,
                                .pSignalSemaphores = &drawCompleteSemaphore};

    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);

    VkPresentInfoKHR present = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = NULL,
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &drawCompleteSemaphore,
        .swapchainCount = 1,
        .pSwapchains = &demo->swapchain,
        .pImageIndices = &demo->current_buffer,
    };

    err = vkQueuePresentKHR(demo->queue, &present);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    err = vkQueueWaitIdle(demo->queue);
    assert(err == VK_SUCCESS);

    vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
    vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
}
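
/* (Re)creates the swapchain from the current surface capabilities and builds a
 * color image view for each swapchain image. The previous swapchain, if any,
 * is passed as oldSwapchain and destroyed once the new one exists. */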
static void demo_prepare_buffers(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    VkSwapchainKHR oldSwapchain = demo->swapchain;

    // Check the surface capabilities and formats
    VkSurfaceCapabilitiesKHR surfCapabilities;
    err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
        demo->gpu, demo->surface, &surfCapabilities);
    assert(!err);

    uint32_t presentModeCount;
    err = vkGetPhysicalDeviceSurfacePresentModesKHR(
        demo->gpu, demo->surface, &presentModeCount, NULL);
    assert(!err);
    VkPresentModeKHR *presentModes =
        (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
    assert(presentModes);
    err = vkGetPhysicalDeviceSurfacePresentModesKHR(
        demo->gpu, demo->surface, &presentModeCount, presentModes);
    assert(!err);

    VkExtent2D swapchainExtent;
    // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
    if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
        // If the surface size is undefined, the size is set to the size
        // of the images requested, which must fit within the minimum and
        // maximum values.
        swapchainExtent.width = demo->width;
        swapchainExtent.height = demo->height;

        if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
            swapchainExtent.width = surfCapabilities.minImageExtent.width;
        } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
            swapchainExtent.width = surfCapabilities.maxImageExtent.width;
        }

        if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
            swapchainExtent.height = surfCapabilities.minImageExtent.height;
        } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
            swapchainExtent.height = surfCapabilities.maxImageExtent.height;
        }
    } else {
        // If the surface size is defined, the swap chain size must match
        swapchainExtent = surfCapabilities.currentExtent;
        demo->width = surfCapabilities.currentExtent.width;
        demo->height = surfCapabilities.currentExtent.height;
    }

    VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;

    // Determine the number of VkImage's to use in the swap chain.
    // Application desires to only acquire 1 image at a time (which is
    // "surfCapabilities.minImageCount").
    uint32_t desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
    // If maxImageCount is 0, we can ask for as many images as we want;
    // otherwise we're limited to maxImageCount
    if ((surfCapabilities.maxImageCount > 0) &&
        (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
        // Application must settle for fewer images than desired:
        desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
    }

    VkSurfaceTransformFlagsKHR preTransform;
    if (surfCapabilities.supportedTransforms &
        VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
        preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
    } else {
        preTransform = surfCapabilities.currentTransform;
    }

    const VkSwapchainCreateInfoKHR swapchain = {
        .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
        .pNext = NULL,
        .surface = demo->surface,
        .minImageCount = desiredNumOfSwapchainImages,
        .imageFormat = demo->format,
        .imageColorSpace = demo->color_space,
        .imageExtent =
            {
             .width = swapchainExtent.width, .height = swapchainExtent.height,
            },
        .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
        .preTransform = preTransform,
        .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
        .imageArrayLayers = 1,
        .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = NULL,
        .presentMode = swapchainPresentMode,
        .oldSwapchain = oldSwapchain,
        .clipped = true,
    };
    uint32_t i;

    err = vkCreateSwapchainKHR(demo->device, &swapchain, NULL, &demo->swapchain);
    assert(!err);

    // If we just re-created an existing swapchain, we should destroy the old
    // swapchain at this point.
    // Note: destroying the swapchain also cleans up all its associated
    // presentable images once the platform is done with them.
    if (oldSwapchain != VK_NULL_HANDLE) {
        vkDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
    }

    err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
                                  &demo->swapchainImageCount, NULL);
    assert(!err);

    VkImage *swapchainImages =
        (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
    assert(swapchainImages);
    err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
                                  &demo->swapchainImageCount,
                                  swapchainImages);
    assert(!err);

    demo->buffers = (SwapchainBuffers *)malloc(sizeof(SwapchainBuffers) *
                                               demo->swapchainImageCount);
    assert(demo->buffers);

    for (i = 0; i < demo->swapchainImageCount; i++) {
        VkImageViewCreateInfo color_attachment_view = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = NULL,
            .format = demo->format,
            .components =
                {
                 .r = VK_COMPONENT_SWIZZLE_R,
                 .g = VK_COMPONENT_SWIZZLE_G,
                 .b = VK_COMPONENT_SWIZZLE_B,
                 .a = VK_COMPONENT_SWIZZLE_A,
                },
            .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                 .baseMipLevel = 0,
                                 .levelCount = 1,
                                 .baseArrayLayer = 0,
                                 .layerCount = 1},
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .flags = 0,
        };

        demo->buffers[i].image = swapchainImages[i];

        color_attachment_view.image = demo->buffers[i].image;

        err = vkCreateImageView(demo->device, &color_attachment_view, NULL,
                                &demo->buffers[i].view);
        assert(!err);
    }

    demo->current_buffer = 0;

    if (NULL != presentModes) {
        free(presentModes);
    }
}
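
/* Creates the D16 depth image, allocates and binds its memory, transitions it
 * to DEPTH_STENCIL_ATTACHMENT_OPTIMAL and creates the depth image view. */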
static void demo_prepare_depth(struct demo *demo) {
    const VkFormat depth_format = VK_FORMAT_D16_UNORM;
    const VkImageCreateInfo image = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = depth_format,
        .extent = {demo->width, demo->height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
        .flags = 0,
    };
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkImageViewCreateInfo view = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext = NULL,
        .image = VK_NULL_HANDLE,
        .format = depth_format,
        .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
                             .baseMipLevel = 0,
                             .levelCount = 1,
                             .baseArrayLayer = 0,
                             .layerCount = 1},
        .flags = 0,
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
    };
    VkMemoryRequirements mem_reqs;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    demo->depth.format = depth_format;

    /* create image */
    err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
    assert(!err);

    /* get memory requirements for this object */
    vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);

    /* select memory size and type */
    mem_alloc.allocationSize = mem_reqs.size;
    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                       0, /* No requirements */
                                       &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->depth.mem);
    assert(!err);

    /* bind memory */
    err =
        vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
    assert(!err);

    demo_set_image_layout(demo, demo->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT,
                          VK_IMAGE_LAYOUT_UNDEFINED,
                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                          0);

    /* create image view */
    view.image = demo->depth.image;
    err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
    assert(!err);
}
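
/* Creates a 2x2 checkerboard texture image with the requested tiling, usage
 * and memory properties. If the memory is host visible, the texels are
 * written directly through a mapping; the image is then transitioned to
 * SHADER_READ_ONLY_OPTIMAL. */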
static void
demo_prepare_texture_image(struct demo *demo, const uint32_t *tex_colors,
                           struct texture_object *tex_obj, VkImageTiling tiling,
                           VkImageUsageFlags usage, VkFlags required_props) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    const int32_t tex_width = 2;
    const int32_t tex_height = 2;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    tex_obj->tex_width = tex_width;
    tex_obj->tex_height = tex_height;

    const VkImageCreateInfo image_create_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = tex_format,
        .extent = {tex_width, tex_height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = tiling,
        .usage = usage,
        .flags = 0,
        .initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED
    };
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkMemoryRequirements mem_reqs;

    err =
        vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
    assert(!err);

    vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);

    mem_alloc.allocationSize = mem_reqs.size;
    pass =
        memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                    required_props, &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &tex_obj->mem);
    assert(!err);

    /* bind memory */
    err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
    assert(!err);

    if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        const VkImageSubresource subres = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .mipLevel = 0,
            .arrayLayer = 0,
        };
        VkSubresourceLayout layout;
        void *data;
        int32_t x, y;

        vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres,
                                    &layout);

        err = vkMapMemory(demo->device, tex_obj->mem, 0,
                          mem_alloc.allocationSize, 0, &data);
        assert(!err);

        for (y = 0; y < tex_height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (x = 0; x < tex_width; x++)
                row[x] = tex_colors[(x & 1) ^ (y & 1)];
        }

        vkUnmapMemory(demo->device, tex_obj->mem);
    }

    tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    demo_set_image_layout(demo, tex_obj->image, VK_IMAGE_ASPECT_COLOR_BIT,
                          VK_IMAGE_LAYOUT_PREINITIALIZED, tex_obj->imageLayout,
                          VK_ACCESS_HOST_WRITE_BIT);
    /* setting the image layout does not reference the actual memory so no need
     * to add a mem ref */
}

static void demo_destroy_texture_image(struct demo *demo,
                                       struct texture_object *tex_obj) {
    /* clean up staging resources */
    vkDestroyImage(demo->device, tex_obj->image, NULL);
    vkFreeMemory(demo->device, tex_obj->mem, NULL);
}
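
/* Prepares the demo textures: sample directly from a linear image when the
 * format supports it, otherwise upload through a staging image copy, then
 * create a sampler and an image view for each texture. */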
static void demo_prepare_textures(struct demo *demo) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    VkFormatProperties props;
    const uint32_t tex_colors[DEMO_TEXTURE_COUNT][2] = {
        {0xffff0000, 0xff00ff00},
    };
    uint32_t i;
    VkResult U_ASSERT_ONLY err;

    vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        if ((props.linearTilingFeatures &
             VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
            !demo->use_staging_buffer) {
            /* Device can texture using linear textures */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i], VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_SAMPLED_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
        } else if (props.optimalTilingFeatures &
                   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
            /* Must use staging buffer to copy linear texture to optimized */
            struct texture_object staging_texture;

            memset(&staging_texture, 0, sizeof(staging_texture));
            demo_prepare_texture_image(
                demo, tex_colors[i], &staging_texture, VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i],
                VK_IMAGE_TILING_OPTIMAL,
                (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

            demo_set_image_layout(demo, staging_texture.image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  staging_texture.imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                  0);

            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  demo->textures[i].imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  0);

            VkImageCopy copy_region = {
                .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .srcOffset = {0, 0, 0},
                .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .dstOffset = {0, 0, 0},
                .extent = {staging_texture.tex_width,
                           staging_texture.tex_height, 1},
            };
            vkCmdCopyImage(
                demo->setup_cmd, staging_texture.image,
                VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, demo->textures[i].image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);

            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  demo->textures[i].imageLayout,
                                  0);

            demo_flush_init_cmd(demo);

            demo_destroy_texture_image(demo, &staging_texture);
        } else {
            /* Can't support VK_FORMAT_B8G8R8A8_UNORM !? */
            assert(!"No support for B8G8R8A8_UNORM as texture image format");
        }

        const VkSamplerCreateInfo sampler = {
            .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            .pNext = NULL,
            .magFilter = VK_FILTER_NEAREST,
            .minFilter = VK_FILTER_NEAREST,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .mipLodBias = 0.0f,
            .anisotropyEnable = VK_FALSE,
            .maxAnisotropy = 1,
            .compareOp = VK_COMPARE_OP_NEVER,
            .minLod = 0.0f,
            .maxLod = 0.0f,
            .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
            .unnormalizedCoordinates = VK_FALSE,
        };
        VkImageViewCreateInfo view = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = NULL,
            .image = VK_NULL_HANDLE,
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .format = tex_format,
            .components =
                {
                 VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
                 VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A,
                },
            .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
            .flags = 0,
        };

        /* create sampler */
        err = vkCreateSampler(demo->device, &sampler, NULL,
                              &demo->textures[i].sampler);
        assert(!err);

        /* create image view */
        view.image = demo->textures[i].image;
        err = vkCreateImageView(demo->device, &view, NULL,
                                &demo->textures[i].view);
        assert(!err);
    }
}
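
/* Fills a host-visible vertex buffer with three interleaved vertices
 * (vec3 position + vec2 texcoord) and sets up the vertex input binding and
 * attribute descriptions consumed by the pipeline. */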
static void demo_prepare_vertices(struct demo *demo) {
    // clang-format off
    const float vb[3][5] = {
        /*      position             texcoord */
        { -1.0f, -1.0f,  0.25f,     0.0f, 0.0f },
        {  1.0f, -1.0f,  0.25f,     1.0f, 0.0f },
        {  0.0f,  1.0f,  1.0f,      0.5f, 1.0f },
    };
    // clang-format on
    const VkBufferCreateInfo buf_info = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = NULL,
        .size = sizeof(vb),
        .usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
        .flags = 0,
    };
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkMemoryRequirements mem_reqs;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;
    void *data;

    memset(&demo->vertices, 0, sizeof(demo->vertices));

    err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->vertices.buf);
    assert(!err);

    vkGetBufferMemoryRequirements(demo->device, demo->vertices.buf, &mem_reqs);
    assert(!err);

    mem_alloc.allocationSize = mem_reqs.size;
    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                                           VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
                                       &mem_alloc.memoryTypeIndex);
    assert(pass);

    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->vertices.mem);
    assert(!err);

    err = vkMapMemory(demo->device, demo->vertices.mem, 0,
                      mem_alloc.allocationSize, 0, &data);
    assert(!err);

    memcpy(data, vb, sizeof(vb));

    vkUnmapMemory(demo->device, demo->vertices.mem);

    err = vkBindBufferMemory(demo->device, demo->vertices.buf,
                             demo->vertices.mem, 0);
    assert(!err);

    demo->vertices.vi.sType =
        VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    demo->vertices.vi.pNext = NULL;
    demo->vertices.vi.vertexBindingDescriptionCount = 1;
    demo->vertices.vi.pVertexBindingDescriptions = demo->vertices.vi_bindings;
    demo->vertices.vi.vertexAttributeDescriptionCount = 2;
    demo->vertices.vi.pVertexAttributeDescriptions = demo->vertices.vi_attrs;

    demo->vertices.vi_bindings[0].binding = VERTEX_BUFFER_BIND_ID;
    demo->vertices.vi_bindings[0].stride = sizeof(vb[0]);
    demo->vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

    demo->vertices.vi_attrs[0].binding = VERTEX_BUFFER_BIND_ID;
    demo->vertices.vi_attrs[0].location = 0;
    demo->vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT;
    demo->vertices.vi_attrs[0].offset = 0;

    demo->vertices.vi_attrs[1].binding = VERTEX_BUFFER_BIND_ID;
    demo->vertices.vi_attrs[1].location = 1;
    demo->vertices.vi_attrs[1].format = VK_FORMAT_R32G32_SFLOAT;
    demo->vertices.vi_attrs[1].offset = sizeof(float) * 3;
}
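
/* Creates the descriptor set layout (a single combined image sampler visible
 * to the fragment stage) and the pipeline layout that uses it. */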
static void demo_prepare_descriptor_layout(struct demo *demo) {
    const VkDescriptorSetLayoutBinding layout_binding = {
        .binding = 0,
        .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        .descriptorCount = DEMO_TEXTURE_COUNT,
        .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
        .pImmutableSamplers = NULL,
    };
    const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        .pNext = NULL,
        .bindingCount = 1,
        .pBindings = &layout_binding,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL,
                                      &demo->desc_layout);
    assert(!err);

    const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
        .pNext = NULL,
        .setLayoutCount = 1,
        .pSetLayouts = &demo->desc_layout,
    };

    err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL,
                                 &demo->pipeline_layout);
    assert(!err);
}
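
/* Creates a single-subpass render pass with one color attachment (cleared and
 * stored) and one depth attachment (cleared, contents discarded afterwards). */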
static void demo_prepare_render_pass(struct demo *demo) {
    const VkAttachmentDescription attachments[2] = {
        [0] =
            {
             .format = demo->format,
             .samples = VK_SAMPLE_COUNT_1_BIT,
             .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
             .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
             .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
             .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
             .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            },
        [1] =
            {
             .format = demo->depth.format,
             .samples = VK_SAMPLE_COUNT_1_BIT,
             .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
             .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
             .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .initialLayout =
                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
             .finalLayout =
                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
            },
    };
    const VkAttachmentReference color_reference = {
        .attachment = 0, .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    const VkAttachmentReference depth_reference = {
        .attachment = 1,
        .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    };
    const VkSubpassDescription subpass = {
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .flags = 0,
        .inputAttachmentCount = 0,
        .pInputAttachments = NULL,
        .colorAttachmentCount = 1,
        .pColorAttachments = &color_reference,
        .pResolveAttachments = NULL,
        .pDepthStencilAttachment = &depth_reference,
        .preserveAttachmentCount = 0,
        .pPreserveAttachments = NULL,
    };
    const VkRenderPassCreateInfo rp_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        .pNext = NULL,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .subpassCount = 1,
        .pSubpasses = &subpass,
        .dependencyCount = 0,
        .pDependencies = NULL,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
    assert(!err);
}
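
/* Wraps a buffer of SPIR-V words in a VkShaderModule. */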
static VkShaderModule
demo_prepare_shader_module(struct demo *demo, const void *code, size_t size) {
    VkShaderModuleCreateInfo moduleCreateInfo;
    VkShaderModule module;
    VkResult U_ASSERT_ONLY err;

    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = NULL;

    moduleCreateInfo.codeSize = size;
    moduleCreateInfo.pCode = code;
    moduleCreateInfo.flags = 0;
    err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
    assert(!err);

    return module;
}

static VkShaderModule demo_prepare_vs(struct demo *demo) {
    size_t size = sizeof(vertShaderCode);

    demo->vert_shader_module =
        demo_prepare_shader_module(demo, vertShaderCode, size);

    return demo->vert_shader_module;
}

static VkShaderModule demo_prepare_fs(struct demo *demo) {
    size_t size = sizeof(fragShaderCode);

    demo->frag_shader_module =
        demo_prepare_shader_module(demo, fragShaderCode, size);

    return demo->frag_shader_module;
}
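
/* Builds the graphics pipeline: vertex input from demo->vertices, triangle
 * list topology, back-face culling, depth test enabled, no blending, and
 * dynamic viewport/scissor state. The shader modules and the temporary
 * pipeline cache are destroyed once the pipeline has been created. */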
static void demo_prepare_pipeline(struct demo *demo) {
    VkGraphicsPipelineCreateInfo pipeline;
    VkPipelineCacheCreateInfo pipelineCache;
    VkPipelineVertexInputStateCreateInfo vi;
    VkPipelineInputAssemblyStateCreateInfo ia;
    VkPipelineRasterizationStateCreateInfo rs;
    VkPipelineColorBlendStateCreateInfo cb;
    VkPipelineDepthStencilStateCreateInfo ds;
    VkPipelineViewportStateCreateInfo vp;
    VkPipelineMultisampleStateCreateInfo ms;
    VkDynamicState dynamicStateEnables[VK_DYNAMIC_STATE_RANGE_SIZE];
    VkPipelineDynamicStateCreateInfo dynamicState;
    VkResult U_ASSERT_ONLY err;

    memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
    memset(&dynamicState, 0, sizeof dynamicState);
    dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pDynamicStates = dynamicStateEnables;

    memset(&pipeline, 0, sizeof(pipeline));
    pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.layout = demo->pipeline_layout;

    vi = demo->vertices.vi;

    memset(&ia, 0, sizeof(ia));
    ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

    memset(&rs, 0, sizeof(rs));
    rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.polygonMode = VK_POLYGON_MODE_FILL;
    rs.cullMode = VK_CULL_MODE_BACK_BIT;
    rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable = VK_FALSE;
    rs.lineWidth = 1.0f;

    memset(&cb, 0, sizeof(cb));
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    VkPipelineColorBlendAttachmentState att_state[1];
    memset(att_state, 0, sizeof(att_state));
    att_state[0].colorWriteMask = 0xf;
    att_state[0].blendEnable = VK_FALSE;
    cb.attachmentCount = 1;
    cb.pAttachments = att_state;

    memset(&vp, 0, sizeof(vp));
    vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp.viewportCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_VIEWPORT;
    vp.scissorCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_SCISSOR;

    memset(&ds, 0, sizeof(ds));
    ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.depthTestEnable = VK_TRUE;
    ds.depthWriteEnable = VK_TRUE;
    ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    ds.back.failOp = VK_STENCIL_OP_KEEP;
    ds.back.passOp = VK_STENCIL_OP_KEEP;
    ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
    ds.stencilTestEnable = VK_FALSE;
    ds.front = ds.back;

    memset(&ms, 0, sizeof(ms));
    ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pSampleMask = NULL;
    ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;

    // Two stages: vs and fs
    pipeline.stageCount = 2;
    VkPipelineShaderStageCreateInfo shaderStages[2];
    memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));

    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    shaderStages[0].module = demo_prepare_vs(demo);
    shaderStages[0].pName = "main";

    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    shaderStages[1].module = demo_prepare_fs(demo);
    shaderStages[1].pName = "main";

    pipeline.pVertexInputState = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState = &cb;
    pipeline.pMultisampleState = &ms;
    pipeline.pViewportState = &vp;
    pipeline.pDepthStencilState = &ds;
    pipeline.pStages = shaderStages;
    pipeline.renderPass = demo->render_pass;
    pipeline.pDynamicState = &dynamicState;

    memset(&pipelineCache, 0, sizeof(pipelineCache));
    pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;

    err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL,
                                &demo->pipelineCache);
    assert(!err);
    err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1,
                                    &pipeline, NULL, &demo->pipeline);
    assert(!err);

    vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);

    vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
    vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
}
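/*
 * The descriptor pool only ever needs to hold a single descriptor set
 * containing one combined image sampler per demo texture.
 */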
static void demo_prepare_descriptor_pool(struct demo *demo) {
    const VkDescriptorPoolSize type_count = {
        .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        .descriptorCount = DEMO_TEXTURE_COUNT,
    };
    const VkDescriptorPoolCreateInfo descriptor_pool = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
        .pNext = NULL,
        .maxSets = 1,
        .poolSizeCount = 1,
        .pPoolSizes = &type_count,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL,
                                 &demo->desc_pool);
    assert(!err);
}
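/*
 * Allocate the descriptor set from the pool and point its combined image
 * sampler bindings at the demo textures with a single vkUpdateDescriptorSets
 * call.
 */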
static void demo_prepare_descriptor_set(struct demo *demo) {
    VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
    VkWriteDescriptorSet write;
    VkResult U_ASSERT_ONLY err;
    uint32_t i;

    VkDescriptorSetAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
        .pNext = NULL,
        .descriptorPool = demo->desc_pool,
        .descriptorSetCount = 1,
        .pSetLayouts = &demo->desc_layout};
    err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->desc_set);
    assert(!err);

    memset(&tex_descs, 0, sizeof(tex_descs));
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        tex_descs[i].sampler = demo->textures[i].sampler;
        tex_descs[i].imageView = demo->textures[i].view;
        tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
    }

    memset(&write, 0, sizeof(write));
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstSet = demo->desc_set;
    write.descriptorCount = DEMO_TEXTURE_COUNT;
    write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    write.pImageInfo = tex_descs;

    vkUpdateDescriptorSets(demo->device, 1, &write, 0, NULL);
}
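/*
 * One framebuffer is created per swapchain image; all of them share the same
 * depth attachment, so only attachments[0] changes inside the loop.
 */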
static void demo_prepare_framebuffers(struct demo *demo) {
    VkImageView attachments[2];
    attachments[1] = demo->depth.view;

    const VkFramebufferCreateInfo fb_info = {
        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .width = demo->width,
        .height = demo->height,
        .layers = 1,
    };
    VkResult U_ASSERT_ONLY err;
    uint32_t i;

    demo->framebuffers = (VkFramebuffer *)malloc(demo->swapchainImageCount *
                                                 sizeof(VkFramebuffer));
    assert(demo->framebuffers);

    for (i = 0; i < demo->swapchainImageCount; i++) {
        attachments[0] = demo->buffers[i].view;
        err = vkCreateFramebuffer(demo->device, &fb_info, NULL,
                                  &demo->framebuffers[i]);
        assert(!err);
    }
}
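/*
 * demo_prepare() drives the whole setup: command pool and primary command
 * buffer first, then swapchain buffers, depth buffer, textures, vertex data,
 * descriptor layout, render pass, pipeline, descriptor pool/set and finally
 * the framebuffers.
 */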
static void demo_prepare(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;

    const VkCommandPoolCreateInfo cmd_pool_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .pNext = NULL,
        .queueFamilyIndex = demo->graphics_queue_node_index,
        .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    };
    err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL,
                              &demo->cmd_pool);
    assert(!err);

    const VkCommandBufferAllocateInfo cmd = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = NULL,
        .commandPool = demo->cmd_pool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->draw_cmd);
    assert(!err);

    demo_prepare_buffers(demo);
    demo_prepare_depth(demo);
    demo_prepare_textures(demo);
    demo_prepare_vertices(demo);
    demo_prepare_descriptor_layout(demo);
    demo_prepare_render_pass(demo);
    demo_prepare_pipeline(demo);

    demo_prepare_descriptor_pool(demo);
    demo_prepare_descriptor_set(demo);

    demo_prepare_framebuffers(demo);
}
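/*
 * GLFW callbacks: report errors, close the window on Escape, redraw on
 * refresh and rebuild swapchain-dependent state on resize.  The struct demo
 * pointer is recovered through the window user pointer set in
 * demo_create_window().
 */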
static void demo_error_callback(int error, const char* description) {
    printf("GLFW error: %s\n", description);
    fflush(stdout);
}

static void demo_key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
    if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
        glfwSetWindowShouldClose(window, GLFW_TRUE);
}

static void demo_refresh_callback(GLFWwindow* window) {
    struct demo* demo = glfwGetWindowUserPointer(window);
    demo_draw(demo);
}

static void demo_resize_callback(GLFWwindow* window, int width, int height) {
    struct demo* demo = glfwGetWindowUserPointer(window);
    demo->width = width;
    demo->height = height;
    demo_resize(demo);
}
static void demo_run(struct demo *demo) {
    while (!glfwWindowShouldClose(demo->window)) {
        glfwPollEvents();

        demo_draw(demo);

        if (demo->depthStencil > 0.99f)
            demo->depthIncrement = -0.001f;
        if (demo->depthStencil < 0.8f)
            demo->depthIncrement = 0.001f;

        demo->depthStencil += demo->depthIncrement;

        // Wait for all submitted work to finish before starting the next frame.
        vkDeviceWaitIdle(demo->device);

        demo->curFrame++;

        if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount)
            glfwSetWindowShouldClose(demo->window, GLFW_TRUE);
    }
}
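/*
 * Create the GLFW window.  GLFW_CLIENT_API is set to GLFW_NO_API so GLFW does
 * not create an OpenGL context for the window; all rendering goes through the
 * Vulkan surface instead.
 */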
static void demo_create_window(struct demo *demo) {
    glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);

    demo->window = glfwCreateWindow(demo->width,
                                    demo->height,
                                    APP_LONG_NAME,
                                    NULL,
                                    NULL);
    if (!demo->window) {
        // It didn't work, so try to give a useful error:
        printf("Cannot create a window in which to draw!\n");
        fflush(stdout);
        exit(1);
    }

    glfwSetWindowUserPointer(demo->window, demo);
    glfwSetWindowRefreshCallback(demo->window, demo_refresh_callback);
    glfwSetFramebufferSizeCallback(demo->window, demo_resize_callback);
    glfwSetKeyCallback(demo->window, demo_key_callback);
}
/*
 * Return 1 (true) if all layer names specified in check_names
 * can be found in the given layer properties.
 */
static VkBool32 demo_check_layers(uint32_t check_count, const char **check_names,
                                  uint32_t layer_count,
                                  VkLayerProperties *layers) {
    uint32_t i, j;
    for (i = 0; i < check_count; i++) {
        VkBool32 found = 0;
        for (j = 0; j < layer_count; j++) {
            if (!strcmp(check_names[i], layers[j].layerName)) {
                found = 1;
                break;
            }
        }
        if (!found) {
            fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
            return 0;
        }
    }
    return 1;
}
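/*
 * Instance creation: optionally enable validation layers (preferring the
 * LunarG standard validation meta-layer and falling back to the individual
 * layers), gather the surface extensions GLFW requires, add the debug report
 * extension when validating, then pick the first physical device and its
 * device-level swapchain extension.
 */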
static void demo_init_vk(struct demo *demo) {
    VkResult err;
    uint32_t i = 0;
    uint32_t required_extension_count = 0;
    uint32_t instance_extension_count = 0;
    uint32_t instance_layer_count = 0;
    uint32_t validation_layer_count = 0;
    const char **required_extensions = NULL;
    const char **instance_validation_layers = NULL;
    demo->enabled_extension_count = 0;
    demo->enabled_layer_count = 0;

    char *instance_validation_layers_alt1[] = {
        "VK_LAYER_LUNARG_standard_validation"
    };

    char *instance_validation_layers_alt2[] = {
        "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
        "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_image",
        "VK_LAYER_LUNARG_core_validation", "VK_LAYER_LUNARG_swapchain",
        "VK_LAYER_GOOGLE_unique_objects"
    };

    /* Look for validation layers */
    VkBool32 validation_found = 0;
    if (demo->validate) {
        err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
        assert(!err);

        instance_validation_layers = (const char**) instance_validation_layers_alt1;
        if (instance_layer_count > 0) {
            VkLayerProperties *instance_layers =
                malloc(sizeof (VkLayerProperties) * instance_layer_count);
            err = vkEnumerateInstanceLayerProperties(&instance_layer_count,
                                                     instance_layers);
            assert(!err);

            validation_found = demo_check_layers(
                ARRAY_SIZE(instance_validation_layers_alt1),
                instance_validation_layers, instance_layer_count,
                instance_layers);
            if (validation_found) {
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
                demo->enabled_layers[0] = "VK_LAYER_LUNARG_standard_validation";
                validation_layer_count = 1;
            } else {
                // use alternative set of validation layers
                instance_validation_layers =
                    (const char**) instance_validation_layers_alt2;
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
                validation_found = demo_check_layers(
                    ARRAY_SIZE(instance_validation_layers_alt2),
                    instance_validation_layers, instance_layer_count,
                    instance_layers);
                validation_layer_count =
                    ARRAY_SIZE(instance_validation_layers_alt2);
                for (i = 0; i < validation_layer_count; i++) {
                    demo->enabled_layers[i] = instance_validation_layers[i];
                }
            }
            free(instance_layers);
        }

        if (!validation_found) {
            ERR_EXIT("vkEnumerateInstanceLayerProperties failed to find "
                     "required validation layer.\n\n"
                     "Please look at the Getting Started guide for additional "
                     "information.\n",
                     "vkCreateInstance Failure");
        }
    }

    /* Look for instance extensions */
    required_extensions = glfwGetRequiredInstanceExtensions(&required_extension_count);
    if (!required_extensions) {
        ERR_EXIT("glfwGetRequiredInstanceExtensions failed to find the "
                 "platform surface extensions.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    for (i = 0; i < required_extension_count; i++) {
        demo->extension_names[demo->enabled_extension_count++] = required_extensions[i];
        assert(demo->enabled_extension_count < 64);
    }

    err = vkEnumerateInstanceExtensionProperties(
        NULL, &instance_extension_count, NULL);
    assert(!err);

    if (instance_extension_count > 0) {
        VkExtensionProperties *instance_extensions =
            malloc(sizeof(VkExtensionProperties) * instance_extension_count);
        err = vkEnumerateInstanceExtensionProperties(
            NULL, &instance_extension_count, instance_extensions);
        assert(!err);
        for (i = 0; i < instance_extension_count; i++) {
            if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
                        instance_extensions[i].extensionName)) {
                if (demo->validate) {
                    demo->extension_names[demo->enabled_extension_count++] =
                        VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
                }
            }
            assert(demo->enabled_extension_count < 64);
        }

        free(instance_extensions);
    }

    const VkApplicationInfo app = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = NULL,
        .pApplicationName = APP_SHORT_NAME,
        .applicationVersion = 0,
        .pEngineName = APP_SHORT_NAME,
        .engineVersion = 0,
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo inst_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = NULL,
        .pApplicationInfo = &app,
        .enabledLayerCount = demo->enabled_layer_count,
        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
    };

    uint32_t gpu_count;

    err = vkCreateInstance(&inst_info, NULL, &demo->inst);
    if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
        ERR_EXIT("Cannot find a compatible Vulkan installable client driver "
                 "(ICD).\n\nPlease look at the Getting Started guide for "
                 "additional information.\n",
                 "vkCreateInstance Failure");
    } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
        ERR_EXIT("Cannot find a specified extension library"
                 ".\nMake sure your layers path is set appropriately\n",
                 "vkCreateInstance Failure");
    } else if (err) {
        ERR_EXIT("vkCreateInstance failed.\n\nDo you have a compatible Vulkan "
                 "installable client driver (ICD) installed?\nPlease look at "
                 "the Getting Started guide for additional information.\n",
                 "vkCreateInstance Failure");
    }

    gladLoadVulkanUserPtr(NULL, glad_vulkan_callback, demo->inst);

    /* Make initial call to query gpu_count, then second call for gpu info */
    err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
    assert(!err && gpu_count > 0);

    if (gpu_count > 0) {
        VkPhysicalDevice *physical_devices =
            malloc(sizeof(VkPhysicalDevice) * gpu_count);
        err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count,
                                         physical_devices);
        assert(!err);
        /* For tri demo we just grab the first physical device */
        demo->gpu = physical_devices[0];
        free(physical_devices);
    } else {
        ERR_EXIT("vkEnumeratePhysicalDevices reported zero accessible devices."
                 "\n\nDo you have a compatible Vulkan installable client"
                 " driver (ICD) installed?\nPlease look at the Getting Started"
                 " guide for additional information.\n",
                 "vkEnumeratePhysicalDevices Failure");
    }

    gladLoadVulkanUserPtr(demo->gpu, glad_vulkan_callback, demo->inst);

    /* Look for device extensions */
    uint32_t device_extension_count = 0;
    VkBool32 swapchainExtFound = 0;
    demo->enabled_extension_count = 0;

    err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL,
                                               &device_extension_count, NULL);
    assert(!err);

    if (device_extension_count > 0) {
        VkExtensionProperties *device_extensions =
            malloc(sizeof(VkExtensionProperties) * device_extension_count);
        err = vkEnumerateDeviceExtensionProperties(
            demo->gpu, NULL, &device_extension_count, device_extensions);
        assert(!err);

        for (i = 0; i < device_extension_count; i++) {
            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME,
                        device_extensions[i].extensionName)) {
                swapchainExtFound = 1;
                demo->extension_names[demo->enabled_extension_count++] =
                    VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            }
            assert(demo->enabled_extension_count < 64);
        }

        free(device_extensions);
    }

    if (!swapchainExtFound) {
        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find "
                 "the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
                 " extension.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    if (demo->validate) {
        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        dbgCreateInfo.flags =
            VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        dbgCreateInfo.pfnCallback = demo->use_break ? BreakCallback : dbgFunc;
        dbgCreateInfo.pUserData = demo;
        dbgCreateInfo.pNext = NULL;
        err = vkCreateDebugReportCallbackEXT(demo->inst, &dbgCreateInfo, NULL,
                                             &demo->msg_callback);
        switch (err) {
        case VK_SUCCESS:
            break;
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            ERR_EXIT("CreateDebugReportCallback: out of host memory\n",
                     "CreateDebugReportCallback Failure");
            break;
        default:
            ERR_EXIT("CreateDebugReportCallback: unknown failure\n",
                     "CreateDebugReportCallback Failure");
            break;
        }
    }
    vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);

    // Query with NULL data to get count
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             NULL);

    demo->queue_props = (VkQueueFamilyProperties *)malloc(
        demo->queue_count * sizeof(VkQueueFamilyProperties));
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             demo->queue_props);
    assert(demo->queue_count >= 1);

    vkGetPhysicalDeviceFeatures(demo->gpu, &demo->gpu_features);

    // Graphics queue and MemMgr queue can be separate.
    // TODO: Add support for separate queues, including synchronization,
    //       and appropriate tracking for QueueSubmit
}
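/*
 * Create the logical device with a single queue from the graphics/present
 * queue family.  Only the shaderClipDistance feature is requested, and only
 * if the physical device actually supports it.
 */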
static void demo_init_device(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;

    float queue_priorities[1] = {0.0};
    const VkDeviceQueueCreateInfo queue = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
        .pNext = NULL,
        .queueFamilyIndex = demo->graphics_queue_node_index,
        .queueCount = 1,
        .pQueuePriorities = queue_priorities};

    VkPhysicalDeviceFeatures features;
    memset(&features, 0, sizeof(features));
    if (demo->gpu_features.shaderClipDistance) {
        features.shaderClipDistance = VK_TRUE;
    }

    VkDeviceCreateInfo device = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext = NULL,
        .queueCreateInfoCount = 1,
        .pQueueCreateInfos = &queue,
        .enabledLayerCount = 0,
        .ppEnabledLayerNames = NULL,
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
        .pEnabledFeatures = &features,
    };

    err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
    assert(!err);
}
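/*
 * Create the window surface, pick a queue family that can both render and
 * present to it, create the logical device, and query the surface formats and
 * memory properties needed later when the swapchain itself is built.
 */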
static void demo_init_vk_swapchain(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    uint32_t i;

    // Create a WSI surface for the window:
    glfwCreateWindowSurface(demo->inst, demo->window, NULL, &demo->surface);

    // Iterate over each queue to learn whether it supports presenting:
    VkBool32 *supportsPresent =
        (VkBool32 *)malloc(demo->queue_count * sizeof(VkBool32));
    for (i = 0; i < demo->queue_count; i++) {
        vkGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface,
                                             &supportsPresent[i]);
    }

    // Search for a graphics and a present queue in the array of queue
    // families, trying to find one that supports both:
    uint32_t graphicsQueueNodeIndex = UINT32_MAX;
    uint32_t presentQueueNodeIndex = UINT32_MAX;
    for (i = 0; i < demo->queue_count; i++) {
        if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
            if (graphicsQueueNodeIndex == UINT32_MAX) {
                graphicsQueueNodeIndex = i;
            }

            if (supportsPresent[i] == VK_TRUE) {
                graphicsQueueNodeIndex = i;
                presentQueueNodeIndex = i;
                break;
            }
        }
    }
    if (presentQueueNodeIndex == UINT32_MAX) {
        // If we didn't find a queue that supports both graphics and present,
        // then find a separate present queue.
        for (i = 0; i < demo->queue_count; ++i) {
            if (supportsPresent[i] == VK_TRUE) {
                presentQueueNodeIndex = i;
                break;
            }
        }
    }
    free(supportsPresent);

    // Generate an error if we could not find both a graphics and a present queue
    if (graphicsQueueNodeIndex == UINT32_MAX ||
        presentQueueNodeIndex == UINT32_MAX) {
        ERR_EXIT("Could not find a graphics and a present queue\n",
                 "Swapchain Initialization Failure");
    }

    // TODO: Add support for separate queues, including presentation,
    //       synchronization, and appropriate tracking for QueueSubmit.
    // NOTE: While it is possible for an application to use a separate graphics
    //       and a present queue, this demo program assumes it is only using
    //       one:
    if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
        ERR_EXIT("Could not find a common graphics and a present queue\n",
                 "Swapchain Initialization Failure");
    }

    demo->graphics_queue_node_index = graphicsQueueNodeIndex;

    demo_init_device(demo);

    vkGetDeviceQueue(demo->device, demo->graphics_queue_node_index, 0,
                     &demo->queue);

    // Get the list of VkFormats that are supported:
    uint32_t formatCount;
    err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
                                               &formatCount, NULL);
    assert(!err);
    VkSurfaceFormatKHR *surfFormats =
        (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
    err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
                                               &formatCount, surfFormats);
    assert(!err);
    // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
    // the surface has no preferred format.  Otherwise, at least one
    // supported format will be returned.
    if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
        demo->format = VK_FORMAT_B8G8R8A8_UNORM;
    } else {
        assert(formatCount >= 1);
        demo->format = surfFormats[0].format;
    }
    demo->color_space = surfFormats[0].colorSpace;

    demo->curFrame = 0;

    // Get memory information and properties
    vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
}
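/*
 * Initialize GLFW and verify that a Vulkan loader is available before any
 * Vulkan entry points are used.
 */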
static void demo_init_connection(struct demo *demo) {
    glfwSetErrorCallback(demo_error_callback);

    if (!glfwInit()) {
        printf("Cannot initialize GLFW.\nExiting ...\n");
        fflush(stdout);
        exit(1);
    }

    if (!glfwVulkanSupported()) {
        printf("GLFW failed to find the Vulkan loader.\nExiting ...\n");
        fflush(stdout);
        exit(1);
    }

    gladLoadVulkanUserPtr(NULL, glad_vulkan_callback, NULL);
}
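/*
 * Parse the command line (--use_staging, --break, --validate and
 * --c <framecount>), then initialize GLFW and the Vulkan instance and set the
 * initial window size and depth animation parameters.
 */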
static void demo_init(struct demo *demo, const int argc, const char *argv[])
{
    int i;
    memset(demo, 0, sizeof(*demo));
    demo->frameCount = INT32_MAX;

    for (i = 1; i < argc; i++) {
        if (strcmp(argv[i], "--use_staging") == 0) {
            demo->use_staging_buffer = true;
            continue;
        }
        if (strcmp(argv[i], "--break") == 0) {
            demo->use_break = true;
            continue;
        }
        if (strcmp(argv[i], "--validate") == 0) {
            demo->validate = true;
            continue;
        }
        if (strcmp(argv[i], "--c") == 0 && demo->frameCount == INT32_MAX &&
            i < argc - 1 && sscanf(argv[i + 1], "%d", &demo->frameCount) == 1 &&
            demo->frameCount >= 0) {
            i++;
            continue;
        }
        fprintf(stderr, "Usage:\n %s [--use_staging] [--validate] [--break] "
                        "[--c <framecount>]\n",
                APP_SHORT_NAME);
        fflush(stderr);
        exit(1);
    }

    demo_init_connection(demo);
    demo_init_vk(demo);

    demo->width = 300;
    demo->height = 300;
    demo->depthStencil = 1.0;
    demo->depthIncrement = -0.01f;
}
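/*
 * Destroy everything demo_prepare() and the initialization functions created,
 * roughly in reverse order of creation, finishing with the device, instance
 * and GLFW window.
 */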
static void demo_cleanup(struct demo *demo) {
    uint32_t i;

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    vkDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
    free(demo->buffers);

    vkDestroyDevice(demo->device, NULL);
    if (demo->validate) {
        vkDestroyDebugReportCallbackEXT(demo->inst, demo->msg_callback, NULL);
    }
    vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
    vkDestroyInstance(demo->inst, NULL);

    free(demo->queue_props);

    glfwDestroyWindow(demo->window);
    glfwTerminate();
}
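/*
 * Resizing rebuilds everything that depends on the swapchain: tear down the
 * framebuffers, command buffers, pipeline objects, vertex data, textures and
 * depth buffer, then run demo_prepare() again, which re-creates the swapchain
 * itself.
 */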
static void demo_resize(struct demo *demo) {
    uint32_t i;

    // In order to properly resize the window, we must re-create the swapchain
    // AND redo the command buffers, etc.
    //
    // First, perform part of the demo_cleanup() function:
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
        demo->setup_cmd = VK_NULL_HANDLE;
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    free(demo->buffers);

    // Second, re-perform the demo_prepare() function, which will re-create the
    // swapchain:
    demo_prepare(demo);
}
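/*
 * main(): initialize, create the window and swapchain, prepare all Vulkan
 * objects, run the render loop until the window closes or --c frames have
 * been rendered, then clean up and return the validation_error flag as the
 * exit code.
 */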
int main(const int argc, const char *argv[]) {
    struct demo demo;

    demo_init(&demo, argc, argv);

    demo_create_window(&demo);
    demo_init_vk_swapchain(&demo);

    demo_prepare(&demo);
    demo_run(&demo);

    demo_cleanup(&demo);

    return validation_error;
}