//
// Copyright (C) 2024 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

package main

import (
	"berberis/cpp_types"
	"berberis/vulkan_types"
	"berberis/vulkan_xml"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"path"
	"sort"
	"strings"
)

func main() {
	var host_arch cpp_types.Arch
	var guest_arch cpp_types.Arch
	var vk_xml_filename string
	var vulkan_xml_filename string
	var custom_trampolines_filename string
	args := os.Args[1:]
	id := 0
	for id < len(args) {
		if args[id] == "--guest_arch" {
			id++
			switch {
			case args[id] == "arm":
				guest_arch = cpp_types.Arm
			case args[id] == "arm64":
				guest_arch = cpp_types.Arm64
			case args[id] == "riscv32":
				guest_arch = cpp_types.Riscv32
			case args[id] == "riscv64":
				guest_arch = cpp_types.Riscv64
			case args[id] == "x86":
				guest_arch = cpp_types.X86
			case args[id] == "x86_64":
				guest_arch = cpp_types.X86_64
			}
		} else if args[id] == "--host_arch" {
			id++
			switch {
			case args[id] == "arm":
				host_arch = cpp_types.Arm
			case args[id] == "arm64":
				host_arch = cpp_types.Arm64
			case args[id] == "riscv32":
				host_arch = cpp_types.Riscv32
			case args[id] == "riscv64":
				host_arch = cpp_types.Riscv64
			case args[id] == "x86":
				host_arch = cpp_types.X86
			case args[id] == "x86_64":
				host_arch = cpp_types.X86_64
			}
		} else if args[id] == "--input" {
			id++
			vk_xml_filename = args[id]
		} else if args[id] == "--xml" {
			id++
			vulkan_xml_filename = args[id]
		} else if args[id] == "--json" {
			id++
			custom_trampolines_filename = args[id]
		} else {
			panic(`
Spurious arguments!

Usage: gen_vulkan --input vk.xml --xml vulkan_xml.h
`)
		}
		id++
	}
	xmlFile, err := os.Open(vk_xml_filename)
	if err != nil {
		panic(err)
	}
	defer xmlFile.Close()
	byteValue, err := ioutil.ReadAll(xmlFile)
	if err != nil {
		panic(err)
	}
	registry, err := vulkan_xml.Unmarshal(byteValue)
	if err != nil {
		panic(err)
	}
	sorted_type_names, types, sorted_command_names, commands, extensions, err := vulkan_xml.VulkanTypesfromXML(registry)
	if err != nil {
		panic(err)
	}
	if vulkan_xml_filename != "" {
		err = generateVulkanXML(sorted_type_names, types, sorted_command_names, commands, extensions, vulkan_xml_filename, host_arch, guest_arch)
		if err != nil {
			panic(err)
		}
	}
	if custom_trampolines_filename != "" {
		err = generateCustomTrampolines(custom_trampolines_filename, sorted_command_names, commands, host_arch, guest_arch)
		if err != nil {
			panic(err)
		}
	}
}

func generateVulkanXML(sorted_type_names []string, types map[string]cpp_types.Type, sorted_command_names []string, commands map[string]cpp_types.Type, extensions map[string]int64, output_file_name string, host_arch, guest_arch cpp_types.Arch) error {
	out_file, err := os.OpenFile(output_file_name, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer out_file.Close()
	_, err = fmt.Fprintf(out_file, `// This file is automatically generated by %s
// DO NOT EDIT!
// clang-format off `, path.Base(os.Args[0])) if err != nil { return err } sorted_struct_type_names, err := sortStructTypes(sorted_type_names, types) if err != nil { return err } err = printAliasTypes(out_file, sorted_type_names, types) if err != nil { return err } err = printEnums(out_file, sorted_type_names, types) if err != nil { return err } err = printEnumAliases(out_file, sorted_type_names, types) if err != nil { return err } conversions, err := getRequiredConversions(commands, types) if err != nil { return err } err = printHostStructTypes(out_file, sorted_struct_type_names, types) if err != nil { return err } err = printFunctionPointerTypes(out_file, sorted_command_names, commands) if err != nil { return err } _, err = fmt.Fprint(out_file, "#ifndef BERBERIS_LAYOUT_CHECK_ONLY\n") if err != nil { return err } err = printExtensionsMap(out_file, extensions) if err != nil { return err } _, err = fmt.Fprint(out_file, "} // namespace\n\n") if err != nil { return err } err = printGuestStructTypes(out_file, sorted_struct_type_names, types, conversions, host_arch, guest_arch) if err != nil { return err } err = printConvertOptionalStructures(out_file, sorted_struct_type_names, types, conversions, host_arch, guest_arch) if err != nil { return err } _, err = fmt.Fprint(out_file, ` // Note: we put all the conversion routines in the anonymous namespace to make sure we are not // generating dead code or referencing non-existing code: attempt to use static function which // is not defined is error and if function is unreferenced that causes error since we are compiling // code with -Wunused-function -Werror options. // // But this requires definition certain stub functions in emulated_api_checker.cc namespace { // These trampolines and runners are too complex to be auto-generated. 
void DoCustomTrampolineWithThunk_vkAllocateCommandBuffers(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkBeginCommandBuffer(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkEnumerateDeviceExtensionProperties(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkEnumerateInstanceExtensionProperties(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkFreeCommandBuffers(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkGetDeviceProcAddr(HostCode callee, ProcessState* state); void DoCustomTrampolineWithThunk_vkGetInstanceProcAddr(HostCode callee, ProcessState* state); void RunGuest_vkEnumerateDeviceExtensionProperties(GuestAddr pc, GuestArgumentBuffer* buf); void RunGuest_vkEnumerateInstanceExtensionProperties(GuestAddr pc, GuestArgumentBuffer* buf); void RunGuest_vkCreateInstance(GuestAddr pc, GuestArgumentBuffer* buf); void RunGuest_vkGetDeviceProcAddr(GuestAddr pc, GuestArgumentBuffer* buf); void RunGuest_vkGetInstanceProcAddr(GuestAddr pc, GuestArgumentBuffer* buf); `) if err != nil { return err } err = printCustomTrampolies(out_file, sorted_command_names, commands, host_arch, guest_arch) if err != nil { return err } err = printCustomGuestRunners(out_file, sorted_command_names, commands, host_arch, guest_arch) if err != nil { return err } err = printMaps(out_file, sorted_command_names, commands, host_arch, guest_arch) if err != nil { return err } err = printGuestStructVerification(out_file, sorted_type_names, types, host_arch, guest_arch) if err != nil { return err } _, err = fmt.Fprintf(out_file, `#endif // BERBERIS_LAYOUT_CHECK_ONLY } // namespace } // namespace berberis // Note: above we define all the Vulkan-related types that we are using, and don't rely on types // from official "vulkan.h" header. // This is because certain "official" types can only be used on certain platforms. E.g. you must be // on Windows platform to imclude Windows-specific types from vulkan_win32.h, you need OS with // Wayland support to use vulkan_wayland.h and so on. // The majority of types are platform-independent though thus comparing layout of our host-based // types to these is an excellent way to make sure our generator doesn't generate bogus type // definitions. #define VK_ENABLE_BETA_EXTENSIONS 1 #include #include #include #include `) if err != nil { return err } err = printAliasVerification(out_file, sorted_type_names, types, host_arch, guest_arch) if err != nil { return err } err = printEnumVerification(out_file, sorted_type_names, types, host_arch, guest_arch) if err != nil { return err } err = printHostStructVerification(out_file, sorted_type_names, types, host_arch, guest_arch) if err != nil { return err } _, err = fmt.Fprintf(out_file, "// clang-format on\n") return err } func generateCustomTrampolines(output_file_name string, sorted_command_names []string, commands map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) (err error) { out_file, err := os.OpenFile(output_file_name, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) if err != nil { return err } defer out_file.Close() symbols_list := []string{} for _, name := range sorted_command_names { command := commands[name] params_are_compatible := true switch name { // These functions are compatible based on signatures, but actually need special processing. 
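		// (Both return function pointers, and a host function pointer handed back verbatim would not be
		// callable from guest code, so they are routed to the hand-written
		// DoCustomTrampolineWithThunk_* implementations instead of the auto-generated path.)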
case "vkGetDeviceProcAddr", "vkGetInstanceProcAddr": params_are_compatible = false } for i := uint(0); i < command.NumField(guest_arch); i++ { param_type := command.Field(i, guest_arch).Type() if !isInputCompatible(param_type, host_arch, guest_arch) { params_are_compatible = false break } } if !params_are_compatible { symbols_list = append(symbols_list, fmt.Sprintf(` "%s": { "call_method": "custom_trampoline_with_thunk" }`, name)) } } _, err = fmt.Fprintf(out_file, `{ "config": { "ignore_non_present": true, "reason": "some Vulkan functions are in drivers and are supported by our proxy while libvulkan.so lags behind" }, "symbols": { %s }, "types": { "struct ANativeWindow": { "force_compatible": true, "reason_for_compatible": [ "This struct is full of function pointers ", "which actually could be used by a guest code. ", "It's too late to try to fix anything in ", "Vulkan library though since these functions ", "could be used before that point. This must ", "be handled by NativeActivity wrapper. " ] } } } `, strings.Join(symbols_list, ",\n")) if err != nil { return err } return err } func printAliasTypes(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type) (err error) { printed_aliases := make(map[string]cpp_types.Type) next_alias_types_list := []cpp_types.Type{} for _, name := range sorted_type_names { typе := types[name] if vulkan_types.IsVulkanHandle(typе) { _, err := fmt.Fprintf(w, "BERBERIS_VK_DEFINE_HANDLE(%s);\n\n", name) if err != nil { return err } printed_aliases[name] = typе } else if vulkan_types.IsVulkanNondispatchableHandle(typе) { _, err := fmt.Fprintf(w, "BERBERIS_VK_DEFINE_NON_DISPATCHABLE_HANDLE(%s);\n\n", name) if err != nil { return err } printed_aliases[name] = typе } else if isAlias(typе) && !isAliasOfEnum(typе) { base_name := typе.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch) for arch := cpp_types.FirstArch + 1; arch <= cpp_types.LastArch; arch++ { if base_name != typе.Elem(arch).Name(arch) { return errors.New("Inconsistent alias \"" + name + "\"") } } next_alias_types_list = append(next_alias_types_list, typе) } } var alias_types_list []cpp_types.Type for len(next_alias_types_list) > 0 { // If next list is the same as previous one then we have some kind of loop and types couldn't be defined. 
		if len(alias_types_list) == len(next_alias_types_list) {
			return errors.New("Cannot make any progress: type \"" +
				alias_types_list[0].Name(cpp_types.FirstArch) +
				"\" refers to undefined type: \"" +
				alias_types_list[0].Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch) + "\"")
		}
		alias_types_list = next_alias_types_list
		next_alias_types_list = []cpp_types.Type{}
		for _, typе := range alias_types_list {
			if _, ok := printed_aliases[typе.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch)]; ok || !isAlias(typе.Elem(cpp_types.FirstArch)) {
				name := typе.Name(cpp_types.FirstArch)
				base_type := typе.Elem(cpp_types.FirstArch)
				base_name := base_type.Name(cpp_types.FirstArch)
				if isStruct(base_type) || isUnion(base_type) {
					_, err := fmt.Fprintf(w, "%s;\n", base_name)
					if err != nil {
						return err
					}
				}
				_, err := fmt.Fprintf(w, "using %s = %s;\n\n", name, base_name)
				if err != nil {
					return err
				}
				printed_aliases[name] = typе
			} else {
				next_alias_types_list = append(next_alias_types_list, typе)
			}
		}
	}
	return nil
}

func printAliasVerification(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) error {
	for _, name := range sorted_type_names {
		typе := types[name]
		if !vulkan_types.IsVulkanHandle(typе) && !vulkan_types.IsVulkanNondispatchableHandle(typе) && !isAlias(typе) {
			continue
		}
		if isAliasOfOpaque(typе) {
			continue
		}
		_, err := fmt.Fprintf(
			w,
			`#if %[7]s
CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[3]d, %[4]d);
#if !defined(BERBERIS_%[1]s)
CHECK_STRUCT_LAYOUT(::%[2]s, %[3]d, %[4]d);
#endif /* BERBERIS_%[1]s */
#elif %[8]s
CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[5]d, %[6]d);
#if !defined(BERBERIS_%[1]s)
CHECK_STRUCT_LAYOUT(::%[2]s, %[5]d, %[6]d);
#endif /* BERBERIS_%[1]s */
#else
#error Unsupported architecture.
#endif
`,
			toEnumNameWithSuffix(name, "NOVERIFY"),
			name,
			typе.Bits(host_arch),
			typе.Align(host_arch),
			typе.Bits(guest_arch),
			typе.Align(guest_arch),
			cpp_types.Define(host_arch),
			cpp_types.Define(guest_arch))
		if err != nil {
			return err
		}
	}
	return nil
}

func printEnums(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type) (err error) {
	for _, name := range sorted_type_names {
		typе := types[name]
		// Note: currently enums in vk.xml are architecture-agnostic. If some type is enum then it's always enum, on all
		// architectures. And base type doesn't depend on the architecture either.
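		// The code below emits a C++ enum with BERBERIS_-prefixed enumerators plus a *_MAX_ENUM
		// sentinel, roughly: enum VkResult : int32_t { BERBERIS_VK_SUCCESS = 0, ...,
		// BERBERIS_VK_RESULT_MAX_ENUM = 0x7FFF'FFFF };  (VkResult is used here purely as an illustration.)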
err := checkEnumConsistency(typе, name) if err != nil { return err } if !isEnum(typе) { continue } _, err = fmt.Fprintf(w, "enum %s : %s {\n", name, typе.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch)) if err != nil { return err } for i := uint(0); i < typе.NumField(cpp_types.FirstArch); i++ { field := typе.Field(i, cpp_types.FirstArch).(cpp_types.EnumFieldInfo) if field.Alias() == "" { _, err = fmt.Fprintf(w, " BERBERIS_%s = %d,\n", field.Name(), field.Value()) if err != nil { return err } } } for i := uint(0); i < typе.NumField(cpp_types.FirstArch); i++ { field := typе.Field(i, cpp_types.FirstArch).(cpp_types.EnumFieldInfo) if field.Alias() != "" { _, err = fmt.Fprintf(w, " BERBERIS_%s = BERBERIS_%s,\n", field.Name(), field.Alias()) if err != nil { return err } } } var maximum_value string if isInt32T(typе.Elem(cpp_types.FirstArch)) { maximum_value = "0x7FFF'FFFF" } else if isUInt32T(typе.Elem(cpp_types.FirstArch)) { maximum_value = "0xFFFF'FFFFU" } else if isInt64T(typе.Elem(cpp_types.FirstArch)) { maximum_value = "0x7FFF'FFFF'FFFF'FFFFULL" } else { return errors.New("Unknown enum base type definitions for type " + name) } _, err = fmt.Fprintf(w, " BERBERIS_%s = %s\n};\n\n", toEnumNameWithSuffix(name, "MAX_ENUM"), maximum_value) if err != nil { return err } } return nil } func printEnumVerification(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) error { for _, name := range sorted_type_names { typе := types[name] // Note: currently enums in vk.xml are architecture-agnostic. If some type is enum then it's always enum, on all // architectures. And base type doesn't depend on the architecture either. err := checkEnumConsistency(typе, name) if err != nil { return err } if !isEnum(typе) { continue } _, err = fmt.Fprintf( w, `#if %[7]s CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[3]d, %[4]d); #elif %[8]s CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[5]d, %[6]d); #else #error Unsupported architecture. #endif #if !defined(BERBERIS_%[1]s) #if %[7]s CHECK_STRUCT_LAYOUT(::%[2]s, %[3]d, %[4]d); #elif %[8]s CHECK_STRUCT_LAYOUT(::%[2]s, %[5]d, %[6]d); #else #error Unsupported architecture. #endif `, toEnumNameWithSuffix(name, "NOVERIFY"), name, typе.Bits(host_arch), typе.Align(host_arch), typе.Bits(guest_arch), typе.Align(guest_arch), cpp_types.Define(host_arch), cpp_types.Define(guest_arch)) if err != nil { return err } for i := uint(0); i < typе.NumField(cpp_types.FirstArch); i++ { field := typе.Field(i, cpp_types.FirstArch).(cpp_types.EnumFieldInfo) _, err = fmt.Fprintf( w, `#if !defined(BERBERIS_%[1]s_NOVERIFY) static_assert(std::int64_t(%[1]s) == std::int64_t(berberis::BERBERIS_%[1]s)); #endif `, field.Name()) if err != nil { return err } } _, err = fmt.Fprintf( w, `#if !defined(BERBERIS_%[2]s_NOVERIFY) static_assert(std::int64_t(%[2]s) == std::int64_t(berberis::BERBERIS_%[2]s)); #endif /* BERBERIS_%[2]s_NOVERIFY */ #endif /* BERBERIS_%[1]s */ `, toEnumNameWithSuffix(name, "NOVERIFY"), toEnumNameWithSuffix(name, "MAX_ENUM")) if err != nil { return err } } return nil } // Note: currently enums in vk.xml are architecture-agnostic. If some type is enum then it's always enum, on all // architectures. And base type doesn't depend on the architecture either. // // Generators (above) rely on that property. 
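// checkEnumConsistency rejects types that violate this: types that are an enum on some architectures
// but not others, and enums whose representation differs between x86 and arm or between arm64 and
// x86_64.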
func checkEnumConsistency(typе cpp_types.Type, name string) error {
	if typе.Kind(cpp_types.FirstArch) != cpp_types.Enum {
		for arch := cpp_types.FirstArch + 1; arch <= cpp_types.LastArch; arch++ {
			if typе.Kind(arch) == cpp_types.Enum {
				return errors.New("Inconsistent types definitions for type " + name)
			}
		}
		return nil
	}
	if !isInputCompatible(typе, cpp_types.X86, cpp_types.Arm) ||
		!isInputCompatible(typе, cpp_types.Arm64, cpp_types.X86_64) {
		return errors.New("Inconsistent types definitions for type " + name)
	}
	return nil
}

func printEnumAliases(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type) (err error) {
	// Note: currently enums in vk.xml are architecture-agnostic. If some type is enum then it's always enum, on all
	// architectures. And base type doesn't depend on the architecture either.
	//
	// This simplifies the generation but we must verify that it's so to make sure this code would be adjusted in the
	// [very unlikely] case where vk.xml would be changed to violate these invariants.
	for _, name := range sorted_type_names {
		typе := types[name]
		if typе.Kind(cpp_types.FirstArch) != cpp_types.Alias || typе.Elem(cpp_types.FirstArch).Kind(cpp_types.FirstArch) != cpp_types.Enum {
			for arch := cpp_types.FirstArch; arch <= cpp_types.LastArch; arch++ {
				if typе.Kind(arch) == cpp_types.Alias && typе.Elem(cpp_types.FirstArch).Kind(cpp_types.FirstArch) == cpp_types.Enum {
					return errors.New("Inconsistent types definitions for type " + name)
				}
			}
			continue
		} else {
			for arch := cpp_types.FirstArch; arch <= cpp_types.LastArch; arch++ {
				if typе.Kind(arch) != cpp_types.Alias || typе.Elem(cpp_types.FirstArch).Kind(cpp_types.FirstArch) != cpp_types.Enum {
					return errors.New("Inconsistent types definitions for type " + name)
				}
				if typе.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch) != typе.Elem(arch).Name(arch) {
					return errors.New("Inconsistent enum alias base type definitions for type " + name)
				}
			}
		}
		_, err = fmt.Fprintf(w, "using %s = %s;\n\n", name, typе.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch))
		if err != nil {
			return err
		}
	}
	return nil
}

func sortStructTypes(sorted_type_names []string, types map[string]cpp_types.Type) (sorted_struct_types_names []string, err error) {
	next_struct_names_list := []string{}
	for _, name := range sorted_type_names {
		typе := types[name]
		if !isStruct(typе) && !isUnion(typе) {
			continue
		}
		ids := typе.NumField(cpp_types.FirstArch)
		for arch := cpp_types.FirstArch + 1; arch <= cpp_types.LastArch; arch++ {
			if typе.Kind(cpp_types.FirstArch) != typе.Kind(arch) {
				return nil, errors.New("Inconsistent struct \"" + name + "\"")
			}
			if ids != typе.NumField(arch) {
				return nil, errors.New("Inconsistent struct \"" + name + "\"")
			}
			for id := uint(0); id < ids; id++ {
				if typе.Field(id, cpp_types.FirstArch).Type().Name(cpp_types.FirstArch) != typе.Field(id, arch).Type().Name(arch) {
					return nil, errors.New("Inconsistent struct \"" + name + "\"")
				}
			}
		}
		next_struct_names_list = append(next_struct_names_list, name)
	}
	sorted_struct_types_names = []string{}
	declared_types := make(map[string]cpp_types.Type)
	var struct_names_list []string
	for len(next_struct_names_list) > 0 {
		// If next list is the same as previous one then we have some kind of loop and types couldn't be defined.
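		// For instance, VkInstanceCreateInfo refers to VkApplicationInfo through a pointer, so it simply
		// stays on the work list until VkApplicationInfo has been emitted; only a genuine dependency
		// cycle would keep the list from ever shrinking.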
if len(struct_names_list) == len(next_struct_names_list) { return nil, errors.New("Cannot make any progress: type \"" + struct_names_list[0] + "\" refers to undefined type") } struct_names_list = next_struct_names_list next_struct_names_list = []string{} type_list: for _, name := range struct_names_list { typе := types[name] ids := typе.NumField(cpp_types.FirstArch) for id := uint(0); id < ids; id++ { field_type := typе.Field(id, cpp_types.FirstArch).Type() if !areBaseTypesDeclared(field_type, declared_types) { next_struct_names_list = append(next_struct_names_list, name) continue type_list } } sorted_struct_types_names = append(sorted_struct_types_names, name) declared_types[typе.Name(cpp_types.FirstArch)] = typе } } return sorted_struct_types_names, nil } func areBaseTypesDeclared(typе cpp_types.Type, declared_types map[string]cpp_types.Type) bool { if typе.Name(cpp_types.FirstArch) == "struct VkBaseInStructure" || typе.Name(cpp_types.FirstArch) == "struct VkBaseOutStructure" { return true } for arch := cpp_types.FirstArch; arch <= cpp_types.LastArch; arch++ { switch typе.Kind(arch) { // If struct or union type is used then we need to have it declared. // This is true both for Host and Guest types. case cpp_types.Struct, cpp_types.Union: if _, ok := declared_types[typе.Name(arch)]; !ok { return false } // Aliases, Arrays, Consts don't change anything case cpp_types.Alias, cpp_types.Array, cpp_types.Const: if !areBaseTypesDeclared(typе.Elem(arch), declared_types) { return false } // Pointers can be used with opaque structs, but it doesn't work if need to perform custom Host/Guest conversions. case cpp_types.Ptr: if !areBaseTypesDeclared(typе.Elem(arch), declared_types) { return false } } } return true } func printHostStructTypes(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type) (err error) { for _, name := range sorted_type_names { if isStruct(types[name]) { _, err = fmt.Fprintf(w, "struct %s;\n\n", name) } else { _, err = fmt.Fprintf(w, "union %s;\n\n", name) } if err != nil { return err } } for _, name := range sorted_type_names { typе := types[name] name := typе.Name(cpp_types.FirstArch) _, err = fmt.Fprintf(w, "%s {\n", name) if err != nil { return err } ids := typе.NumField(cpp_types.FirstArch) for id := uint(0); id < ids; id++ { field_name := typе.Field(id, cpp_types.FirstArch).Name() field_type := typе.Field(id, cpp_types.FirstArch).Type() if isPtrToOpaque(field_type) { // Assume opaque types are structs. _, err = fmt.Fprintf(w, " struct %s;\n", field_type.DeclareVar(field_name, cpp_types.FirstArch)) } else if isPtrToConstOpaque(field_type) { // Assume opaque types are structs. _, err = fmt.Fprintf(w, " const struct %s;\n", cpp_types.PointerType(field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch)).DeclareVar(field_name, cpp_types.FirstArch)) } else if isPtrToFunc(field_type) { // Declare functions with BERBERIS_VKAPI_PTR attribute. // This is needed if we want to use these on ARM platform because default Android API is // “aapcs” but Vulkan needs “aapcs-vfp”. _, err = fmt.Fprintf(w, " %s;\n", field_type.Elem(cpp_types.FirstArch).DeclareVar( fmt.Sprintf("(BERBERIS_VKAPI_PTR *%s)", field_name), cpp_types.FirstArch)) } else if isConstPtrToFunc(field_type) { // Declare functions with BERBERIS_VKAPI_PTR attribute. // This is needed if we want to use these on ARM platform because default Android API is // “aapcs” but Vulkan needs “aapcs-vfp”. 
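			// (On 32-bit ARM "aapcs" passes floating-point arguments in core registers while "aapcs-vfp"
			// passes them in VFP registers, so calling through a pointer declared with the wrong convention
			// would silently corrupt arguments.)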
_, err = fmt.Fprintf(w, " %s;\n", field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).DeclareVar( fmt.Sprintf("(BERBERIS_VKAPI_PTR *%sconst)", field_name), cpp_types.FirstArch)) } else { _, err = fmt.Fprintf(w, " %s;\n", field_type.DeclareVar(field_name, cpp_types.FirstArch)) } if err != nil { return err } } _, err = fmt.Fprintf(w, "};\n\n") if err != nil { return err } } return nil } func printFunctionPointerTypes(w io.Writer, sorted_command_names []string, commands map[string]cpp_types.Type) (err error) { for _, name := range sorted_command_names { _, err = fmt.Fprintf(w, "using PFN_%s = %s;\n", name, commands[name].DeclareVar("(BERBERIS_VKAPI_PTR*)", cpp_types.FirstArch)) if err != nil { return err } } return nil } func getRequiredConversions(commands map[string]cpp_types.Type, types map[string]cpp_types.Type) (conversion map[string]*NeededConvertor, err error) { conversion = make(map[string]*NeededConvertor) for name, _ := range types { conversion[name] = &NeededConvertor{ need_base_convertor: false, need_array_convertor: false, need_const_convertor: false, need_const_array_convertor: false, } } // We need convertors for types referred in functions (commands in Vulkan-speak) and all // types which they refer too (transitively). // // We always have to handle all of them symmetrically because Layers require conversions // in both directions. for _, command := range commands { for i := uint(0); i < command.NumField(cpp_types.FirstArch); i++ { param_type := command.Field(i, cpp_types.FirstArch).Type() if isPtrToAlias(param_type) { param_type = cpp_types.PointerType(param_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch)) } if isPtrToConstAlias(param_type) { param_type = cpp_types.PointerType(cpp_types.ConstType(param_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch))) } param_length := command.Field(i, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length() if isPtrToConstStruct(param_type) { base_name := param_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).BaseName(cpp_types.FirstArch) if param_length == nil { conversion[base_name].need_const_convertor = true } else if isPtr(param_length.Type()) { return nil, errors.New("Unsupported combination of types for command") } else { if base_name == "VkDescriptorBufferBindingInfoEXT" { // TODO(b/322902400): Make VkDescriptorBufferBindingInfoEXT work with berberis. } else { conversion[base_name].need_const_array_convertor = true } } } else if isPtrToStruct(param_type) { base_name := param_type.Elem(cpp_types.FirstArch).BaseName(cpp_types.FirstArch) if param_length == nil { if base_name == "VkGetLatencyMarkerInfoNV" { // TODO(b/322902403): Make VkGetLatencyMarkerInfoNV work with berberis. 
} else { conversion[base_name].need_base_convertor = true } } else if isPtr(param_length.Type()) { conversion[base_name].need_array_convertor = true } else { return nil, errors.New("Unsupported combination of types for command") } } } } for { attributes_changed := false for name, typе := range types { if !isStruct(typе) { continue } ids := typе.NumField(cpp_types.FirstArch) field_types := []cpp_types.Type{} field_lengths := []cpp_types.FieldInfo{} for id := uint(0); id < ids; id++ { field_type := typе.Field(id, cpp_types.FirstArch).Type() if isUnion(field_type) { union_ids := field_type.NumField(cpp_types.FirstArch) for union_id := uint(0); union_id < union_ids; union_id++ { field_types = append(field_types, field_type.Field(union_id, cpp_types.FirstArch).Type()) field_lengths = append(field_lengths, field_type.Field(union_id, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length()) } } else { field_types = append(field_types, field_type) field_lengths = append(field_lengths, typе.Field(id, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length()) } } if len(typе.(vulkan_xml.ExtendedStructInfo).ExtendedWith()) > 0 { if typе.Field(0, cpp_types.FirstArch).Name() != "sType" { return nil, errors.New("Extensible data structure without sType") } if typе.Field(1, cpp_types.FirstArch).Name() != "pNext" { return nil, errors.New("Extensible data structure without pNext") } for _, extra_type := range typе.(vulkan_xml.ExtendedStructInfo).ExtendedWith() { if isPtrToConst(typе.Field(1, cpp_types.FirstArch).Type()) { field_types = append(field_types, cpp_types.PointerType(cpp_types.ConstType(extra_type))) } else { field_types = append(field_types, cpp_types.PointerType(extra_type)) } field_lengths = append(field_lengths, nil) } } for id, field_type := range field_types { field_length := field_lengths[id] if isPtrToAlias(field_type) { field_type = cpp_types.PointerType(field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch)) } if isPtrToConstAlias(field_type) { field_type = cpp_types.PointerType(cpp_types.ConstType(field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch))) } if isPtrToConstStruct(field_type) { base_type := field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch) base_name := base_type.BaseName(cpp_types.FirstArch) if field_length == nil { if (conversion[name].need_const_convertor || conversion[name].need_const_array_convertor) && !conversion[base_name].need_const_convertor { if base_name == "VkFaultCallbackInfo" { // TODO(b/322902053): Make VkFaultCallbackInfo work with berberis. } else { conversion[base_name].need_const_convertor = true attributes_changed = true } } // Optional data structures may be used both as inputs and outputs thus we can not rely on the constness of the "Next" pointer. 
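					// Concretely: when the enclosing struct needs the output (non-const) convertor, an optional
					// child reached here via a const pointer still gets the non-const convertor as well.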
if conversion[name].need_base_convertor && !conversion[name].need_array_convertor && base_type.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { if !conversion[base_name].need_base_convertor { conversion[base_name].need_base_convertor = true attributes_changed = true } } else if conversion[name].need_base_convertor || conversion[name].need_array_convertor { return nil, errors.New("Unsupported combination of types for struct") } } else if isPtr(field_length.Type()) { return nil, errors.New("Unsupported combination of types for struct") } else { if (conversion[name].need_const_convertor || conversion[name].need_const_array_convertor) && !conversion[base_name].need_const_array_convertor { conversion[base_name].need_const_array_convertor = true attributes_changed = true } else if conversion[name].need_base_convertor || conversion[name].need_array_convertor { return nil, errors.New("Unsupported combination of types for struct") } } } else if isPtrToStruct(field_type) { base_type := field_type.Elem(cpp_types.FirstArch) base_name := base_type.BaseName(cpp_types.FirstArch) if base_name == "VkDescriptorBufferBindingPushDescriptorBufferHandleEXT" || base_name == "VkDrmFormatModifierProperties2EXT" || base_name == "VkDrmFormatModifierPropertiesEXT" || base_name == "VkRenderPassCreationFeedbackInfoEXT" || base_name == "VkRenderPassSubpassFeedbackInfoEXT" { // TODO(b/171255170): Process the optional structures correctly. } else if base_name == "VkPipelineCreationFeedback" { // VkPipelineCreationFeedback is referred from input data structure VkPipelineCreationFeedbackCreateInfo if !conversion[base_name].need_base_convertor { conversion[base_name].need_base_convertor = true attributes_changed = true } if !conversion[base_name].need_array_convertor { conversion[base_name].need_array_convertor = true attributes_changed = true } } else if field_length == nil { if (conversion[name].need_base_convertor || conversion[name].need_array_convertor) && !conversion[base_name].need_base_convertor { conversion[base_name].need_base_convertor = true attributes_changed = true } // Optional data structures may be used both as inputs and outputs thus we can not rely on the constness of the "Next" pointer. if conversion[name].need_const_convertor && !conversion[name].need_const_array_convertor && base_type.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { if !conversion[base_name].need_const_convertor { conversion[base_name].need_const_convertor = true attributes_changed = true } } else if conversion[name].need_const_convertor || conversion[name].need_const_array_convertor { return nil, errors.New("Unsupported combination of types for struct") } } else { // We don't even try to handle nested out structures (except one case above). 
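					// Reaching this point with any convertor already requested for the enclosing struct would mean
					// an out-parameter array of structs nested inside another struct, which this generator does not
					// support, so fail loudly rather than emit wrong conversions.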
if conversion[name].need_const_convertor || conversion[name].need_base_convertor || conversion[name].need_const_array_convertor || conversion[name].need_array_convertor { return nil, errors.New("Unsupported combination of types for struct") } } } } } if !attributes_changed { break } } return conversion, nil } type NeededConvertor struct { need_base_convertor bool need_array_convertor bool need_const_convertor bool need_const_array_convertor bool } func printGuestStructTypes(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, conversion map[string]*NeededConvertor, host_arch, guest_arch cpp_types.Arch) (err error) { for _, name := range sorted_type_names { typе := types[name] if isInputCompatible(typе, host_arch, guest_arch) { continue } // The goal is to resolve issue with recursive structures like // VkBaseInStructure/VkBaseOutStructure. // // These structures include pNext type which is not pointer to void // (as usual), but point to the structures of the same type. // // Thus we have the following definition: // // template<> // class GuestType { // public: // using Type = VkBaseOutStructure; // // GuestType sType; // GuestType pNext; // }; // // It can only be processed if “class GuestType” is defined // before “class GuestType”, but then we would have problem // with all conversion routines for “class GuestType” since // these require access to “class GuestType”! // // Resolution is to declare “class GuestType”, then declare // “class GuestType” and only then declare conversion routines. // err = printGuestStructPointerType(w, name, typе, host_arch, guest_arch, conversion) if err != nil { return err } err = printGuestStructType(w, name, typе, host_arch, guest_arch) if err != nil { return err } err = printGuestStructPointerConversionsType(w, name, typе, host_arch, guest_arch, conversion) if err != nil { return err } } return nil } func printGuestStructType(w io.Writer, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) error { // Note: it's not possible to pass struct as an out argument in C (possible with C++ and references, but not with C) // and it looks as if GuestType is not needed (arguments of "const StructType" and "StructType" // are identical in C). // // But we need GuestType and GuestType to handle the following corners case: // struct Foo { // … ← here we have pointers, optional extensions or other complex data. // }; // struct Bar { // Foo foo; // }; // void baz(Bar* bar); // // Here function baz returns struct “struct Bar” (using pointer argument) which embeds “struct Foo”. // Note that these structs can be nested few levels in depth. // // Here we effectively return value of type Foo (even if that's not possible, strictly speaking, in C) and the // simplest way is to keep origin and do conversion in destructor. // // That's not very efficient (we are keeping address which can, actually, be calculated) but that's simpler and, // thankfully, that's rare corner case, not the norm. 
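	// The loop below runs twice and emits both the GuestType<Foo> and the GuestType<const Foo>
	// specializations described above for every layout-incompatible struct or union.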
for _, cоnst := range []string{"", "const "} { _, err := fmt.Fprintf(w, "template<>\nclass GuestType<%[1]s%[2]s> {\n public:\n using Type = %[1]s%[2]s;\n\n", cоnst, name) if err != nil { return err } if isUnion(typе) { _, err := fmt.Fprintln(w, " union Union {") if err != nil { return err } } ids := typе.NumField(cpp_types.FirstArch) for id := uint(0); id < ids; id++ { field_name := typе.Field(id, cpp_types.FirstArch).Name() field_type := typе.Field(id, cpp_types.FirstArch).Type() if cоnst == "const " { if isArray(field_type) { field_type = cpp_types.ArrayType(cpp_types.ConstType(field_type.Elem(cpp_types.FirstArch)), field_type.NumField(cpp_types.FirstArch)) } else { field_type = cpp_types.ConstType(field_type) } } if isPtrToOpaque(field_type) { // Assume opaque types are structs. _, err = fmt.Fprintf(w, " GuestType<%sstruct %s> %s;\n", cоnst, field_type.Name(cpp_types.FirstArch), field_name) } else if isPtrToConstOpaque(field_type) { // Assume opaque types are structs. _, err = fmt.Fprintf(w, " GuestType %s;\n", cpp_types.PointerType(field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch)).Name(cpp_types.FirstArch), field_name) } else if isPtrToFunc(field_type) { // Declare functions with BERBERIS_VKAPI_PTR attribute. // This is needed if we want to use these on ARM platform because default Android API is // “aapcs” but Vulkan needs “aapcs-vfp”. _, err = fmt.Fprintf(w, " GuestType<%s> %s;\n", field_type.Elem(cpp_types.FirstArch).DeclareVar( "(BERBERIS_VKAPI_PTR*)", cpp_types.FirstArch), field_name) } else if isConstPtrToFunc(field_type) { // Declare functions with BERBERIS_VKAPI_PTR attribute. // This is needed if we want to use these on ARM platform because default Android API is // “aapcs” but Vulkan needs “aapcs-vfp”. _, err = fmt.Fprintf(w, " GuestType<%s> %s;\n", field_type.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).DeclareVar( "(BERBERIS_VKAPI_PTR*const)", cpp_types.FirstArch), field_name) } else if isArray(field_type) { elem := field_type.Elem(cpp_types.FirstArch) if isArray(elem) || isPtr(elem) { return errors.New("Array of complex type: " + field_type.Name(cpp_types.FirstArch)) } _, err = fmt.Fprintf(w, " GuestType<%s> %s[%d];\n", field_type.Elem(cpp_types.FirstArch).Name(cpp_types.FirstArch), field_name, field_type.NumField(cpp_types.FirstArch)) } else if field_type.Align(host_arch) != field_type.Align(guest_arch) { _, err = fmt.Fprintf(w, " alignas(%d) GuestType<%s> %s;\n", field_type.Align(guest_arch)/8, field_type.Name(cpp_types.FirstArch), field_name) } else { _, err = fmt.Fprintf(w, " GuestType<%s> %s;\n", field_type.Name(cpp_types.FirstArch), field_name) } if err != nil { return err } } if isUnion(typе) { // Make the uniоn default-constructible by initializing first field. We need it to support holders. _, err := fmt.Fprintf(w, " } uniоn = { .%s = {} };\n", typе.Field(0, cpp_types.FirstArch).Name()) if err != nil { return err } } _, err = fmt.Fprint(w, "};\n\n") if err != nil { return err } } return nil } func printGuestStructPointerType(w io.Writer, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch, conversion map[string]*NeededConvertor) error { // Naked unions are never passed around in Vulkan. They are always embedded into structure with selector field. if isUnion(typе) { return nil } // We are not trying to convert on ARM64 because we don't know if pointer is valid (and if all extensions are compatible). // On ARM32 we need to convert some data structures, but tests pass because of quirk of how they are run. 
// TODO(b/274875580): fix properly. if guest_arch == cpp_types.Arm64 && name == "VkCommandBufferInheritanceInfo" { return nil } _, err := fmt.Fprintf(w, `template<> class GuestType<%[1]s*> { public: using Type = %[1]s*; class GuestHolder; class GuestArrayHolder; class HostHolder; class HostArrayHolder; GuestType(%[1]s* const new_value, GuestHolder& holder, bool& out_of_memory); GuestType(%[1]s* const new_value, GuestArrayHolder& holder, const std::uint32_t* size, bool& out_of_memory); GuestType(GuestType<%[1]s>* const& new_value); GuestType(GuestType<%[1]s>*&& new_value); GuestType() = default; GuestType(const GuestType&) = default; GuestType(GuestType&&) = default; GuestType& operator=(const GuestType& data) = default; GuestType& operator=(GuestType&& data) = default; ~GuestType() = default; `, name) if err != nil { return err } if conversion[name].need_base_convertor { _, err = fmt.Fprintf(w, " friend %[1]s* ToHostType(const GuestType&, HostHolder& holder, bool& out_of_memory);\n", name) if err != nil { return err } } if conversion[name].need_array_convertor { _, err = fmt.Fprintf(w, " friend %[1]s* ToHostType(const GuestType&, HostArrayHolder& holder, const std::uint32_t* size, bool& out_of_memory);\n", name) if err != nil { return err } } _, err = fmt.Fprintf(w, ` friend GuestAddr ToGuestAddr(const GuestType& guest_type) { return guest_type.value_; } private: GuestAddr value_; }; `) if err != nil { return err } _, err = fmt.Fprintf(w, `template<> class GuestType { public: using Type = const %[1]s*; class GuestHolder; class GuestArrayHolder; class HostHolder; class HostArrayHolder; GuestType(const %[1]s* const new_value, GuestHolder& holder, bool& out_of_memory); `, name) if err != nil { return err } if name == "VkAccelerationStructureGeometryKHR" { _, err = fmt.Fprintf(w, " GuestType(const VkAccelerationStructureGeometryKHR* const new_value, const VkAccelerationStructureGeometryKHR* const* const new_value_ptr, GuestArrayHolder& holder, std::size_t size, bool& out_of_memory);\n") } else { _, err = fmt.Fprintf(w, " GuestType(const %[1]s* const new_value, GuestArrayHolder& holder, std::size_t size, bool& out_of_memory);\n", name) } if err != nil { return err } _, err = fmt.Fprintf(w, ` GuestType(GuestType* const& new_value); GuestType(GuestType*&& new_value); GuestType(GuestType<%[1]s>* const& new_value); GuestType(GuestType<%[1]s>*&& new_value); GuestType() = default; GuestType(const GuestType&) = default; GuestType(GuestType&&) = default; GuestType& operator=(const GuestType& data) = default; GuestType& operator=(GuestType&& data) = default; ~GuestType() = default; `, name) if err != nil { return err } if conversion[name].need_const_convertor { if name == "VkCommandBufferBeginInfo" { _, err = fmt.Fprintf(w, " friend const %[1]s* ToHostType(const GuestType&, HostHolder& holder, bool has_inheritance_info, bool& out_of_memory);\n", name) } else { _, err = fmt.Fprintf(w, " friend const %[1]s* ToHostType(const GuestType&, HostHolder& holder, bool& out_of_memory);\n", name) } if err != nil { return err } } if name == "VkAccelerationStructureGeometryKHR" { _, err = fmt.Fprintf(w, " friend const %[1]s* ToHostType(const GuestType& new_value, GuestType const new_value_ptr, HostArrayHolder& holder, std::size_t size, bool& out_of_memory);\n", name) } else if conversion[name].need_const_array_convertor { _, err = fmt.Fprintf(w, " friend const %[1]s* ToHostType(const GuestType&, HostArrayHolder& holder, std::size_t size, bool& out_of_memory);\n", name) } if err != nil { return err } _, err = 
fmt.Fprintf(w, ` friend GuestAddr ToGuestAddr(const GuestType& guest_type) { return guest_type.value_; } private: GuestAddr value_; }; `) if err != nil { return err } return nil } func printGuestStructPointerConversionsType(w io.Writer, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch, conversion map[string]*NeededConvertor) error { // Naked unions are never passed around in Vulkan. They are always embedded into structure with selector field. if isUnion(typе) { return nil } // We are not trying to convert on ARM64 because we don't know if pointer is valid (and if all extensions are compatible). // On ARM32 we need to convert some data structures, but tests pass because of quirk of how they are run. // TODO(b/274875580): fix properly. if guest_arch == cpp_types.Arm64 && name == "VkCommandBufferInheritanceInfo" { return nil } // Any data structure which have sType fields and pNext fields can be, potentially, expanded: // https://github.com/KhronosGroup/Vulkan-Guide/blob/main/chapters/pnext_and_stype.adoc // We support extensions with data structures we are familiar with, but not unknown ones. extensible_type := isExtensibleType(typе) _, err := fmt.Fprintf(w, `inline GuestType<%[1]s*>::GuestType(GuestType<%[1]s>* const& new_value) : value_(ToGuestAddr(new_value)) {} inline GuestType<%[1]s*>::GuestType(GuestType<%[1]s>*&& new_value) : value_(ToGuestAddr(new_value)) {} inline GuestType::GuestType(GuestType* const& new_value) : value_(ToGuestAddr(new_value)) {} inline GuestType::GuestType(GuestType*&& new_value) : value_(ToGuestAddr(new_value)) {} inline GuestType::GuestType(GuestType<%[1]s>* const& new_value) : value_(ToGuestAddr(new_value)) {} inline GuestType::GuestType(GuestType<%[1]s>*&& new_value) : value_(ToGuestAddr(new_value)) {} `, name) if err != nil { return err } if conversion[name].need_base_convertor { if typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::GuestHolder : public GuestHolderBase { public: virtual ~GuestHolder() override; `, name) } else { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::GuestHolder { public: ~GuestHolder(); `, name) } if err != nil { return err } holder := "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += "GuestType<" + name + "> data_;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend GuestType<%[1]s*>; private: }; inline GuestType<%[1]s*>::GuestHolder::~GuestHolder() { } inline GuestType<%[1]s*>::GuestType(%[1]s* const new_value, GuestType<%[1]s*>::GuestHolder&, bool&) : value_(bit_cast(new_value)) { } `, name) } else { _, err = fmt.Fprintf(w, ` friend GuestType<%[1]s*>; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); %[1]s* origin_ = nullptr; %[2]s }; inline GuestType<%[1]s*>::GuestHolder::~GuestHolder() { *origin_ = { %[3]s }; } inline GuestType<%[1]s*>::GuestType(%[1]s* const new_value, GuestType<%[1]s*>::GuestHolder& holder, [[maybe_unused]] bool& out_of_memory) : value_(new_value == nullptr ? 
kNullGuestAddr : ToGuestAddr(&holder.data_)) { holder.origin_ = new_value; holder.data_ = { %[4]s }; } `, name, strings.Join(append( makeHolderList("GuestType<%s>::Guest%sHolder %s_holder_;", false, typе, host_arch, guest_arch), holder), "\n "), strings.Join(makeHostInitializerList(initializeDataMembers, name, typе, host_arch, guest_arch), "\n "), strings.Join(makeGuestInitializerList(initializePointers, name, typе, host_arch, guest_arch), "\n ")) } if err != nil { return err } if typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::HostHolder: HostHolderBase { public: virtual ~HostHolder() override; `, name) } else { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::HostHolder { public: ~HostHolder(); `, name) } if err != nil { return err } holder = "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += name + " data_;" extra_holder_argument := "" if name == "VkCommandBufferBeginInfo" { extra_holder_argument = "bool has_inheritance_info, " } if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostHolder& holder, bool& out_of_memory); private: }; inline GuestType<%[1]s*>::HostHolder::~HostHolder() { } inline %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostHolder&, bool&) { return ToHostAddr<%[1]s>(ToGuestAddr(new_value)); } `, name) } else { _, err = fmt.Fprintf(w, ` friend %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostHolder& holder, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); GuestType<%[1]s>* origin_ = nullptr; %[2]s }; inline GuestType<%[1]s*>::HostHolder::~HostHolder() { *origin_ = { %[3]s }; } inline %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostHolder& holder, %[5]s[[maybe_unused]] bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } holder.origin_ = ToHostAddr>(ToGuestAddr(new_value)); holder.data_ = { %[4]s }; return &holder.data_; } `, name, strings.Join(append( makeHolderList("GuestType<%s>::Host%sHolder %s_holder_;", false, typе, host_arch, guest_arch), holder), "\n "), strings.Join(makeGuestInitializerList(initializeDataMembers, name, typе, host_arch, guest_arch), "\n "), strings.Join(makeHostInitializerList(initializePointers, name, typе, host_arch, guest_arch), "\n "), extra_holder_argument) } if err != nil { return err } } if conversion[name].need_array_convertor { holder := "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += "GuestType<" + name + ">* data_ = nullptr;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType<%[1]s*>; private: }; inline GuestType<%[1]s*>::GuestArrayHolder::~GuestArrayHolder() { } inline GuestType<%[1]s*>::GuestType(%[1]s* const new_value, GuestType<%[1]s*>::GuestArrayHolder&, const std::uint32_t*, bool&) : value_(bit_cast(new_value)) { } `, name) } else { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType<%[1]s*>; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType 
ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); const std::uint32_t* size_ = nullptr; %[1]s* origin_ = nullptr; %[2]s }; inline GuestType<%[1]s*>::GuestArrayHolder::~GuestArrayHolder() { if (size_ == nullptr) { return; } for (std::size_t index = 0; index < *size_; ++index) { origin_[index] = { %[5]s }; } %[3]s } // Note: for output data structures combo where “*size == 0” and yet data pointer is not nullptr is both valid and different from situation where data pointer is nullptr. // Correct handling relies on obscure fact that “new Foo[0]” is also valid call in C++ and is guaranteed to produce non-nullptr pointer (which can not be dereferenced). inline GuestType<%[1]s*>::GuestType(%[1]s* const new_value, GuestType<%[1]s*>::GuestArrayHolder& holder, const std::uint32_t* size, [[maybe_unused]] bool& out_of_memory) : value_((new_value == nullptr) ? kNullGuestAddr : ToGuestAddr(holder.data_ = new (std::nothrow) GuestType<%[1]s>[*size])) { %[4]s for (std::size_t index = 0; index < *size; ++index) { ToHostAddr>(value_)[index] = { %[6]s }; } } `, name, strings.Join(append([]string{holder}, // Note %s after * is to intentionally trigger error if/when arrays to arrays would occur in the output data structures. // TODO: read the documentation and fix the code if that would ever happen. makeHolderList("GuestType<%s>::GuestHolder*%s %s_holder_ = nullptr;", false, typе, host_arch, guest_arch)...), "\n "), strings.Join(append( makeHolderList("delete[] %[3]s_holder_;", false, typе, host_arch, guest_arch), "delete[] data_;"), "\n "), strings.Join(append([]string{"holder.origin_ = new_value;\n if (value_ == kNullGuestAddr) {\n if (new_value != nullptr) {\n out_of_memory = true;\n }\n return;\n }\n holder.size_ = size;"}, makeHolderList("if ((holder.%[3]s_holder_ = new (std::nothrow) GuestType<%[1]s>::GuestHolder%[2]s[*size]) == nullptr) {\n out_of_memory = true;\n return;\n };", false, typе, host_arch, guest_arch)...), "\n "), strings.Join(makeHostInitializerListForArray(initializeDataMembers, name, typе, host_arch, guest_arch), "\n "), strings.Join(makeGuestInitializerListForArray(initializePointers, name, typе, host_arch, guest_arch), "\n ")) } if err != nil { return err } holder = "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += name + "* data_ = nullptr;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::HostArrayHolder { public: ~HostArrayHolder(); friend %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostArrayHolder& holder, const std::uint32_t* size, bool& out_of_memory); private: }; inline GuestType<%[1]s*>::HostArrayHolder::~HostArrayHolder() { } inline %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostArrayHolder&, const std::uint32_t*, bool&) { return ToHostAddr<%[1]s>(ToGuestAddr(new_value)); } `, name) } else { _, err = fmt.Fprintf(w, `class GuestType<%[1]s*>::HostArrayHolder { public: ~HostArrayHolder(); friend %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostArrayHolder& holder, const std::uint32_t* size, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); const std::uint32_t* size_ = nullptr; GuestType<%[1]s>* origin_ = nullptr; %[2]s }; inline GuestType<%[1]s*>::HostArrayHolder::~HostArrayHolder() { if 
(size_ == nullptr) { return; } for (std::size_t index = 0; index < *size_; ++index) { origin_[index] = { %[5]s }; } %[3]s } inline %[1]s* ToHostType(const GuestType<%[1]s*>& new_value, GuestType<%[1]s*>::HostArrayHolder& holder, const std::uint32_t* size, bool& out_of_memory) { holder.origin_ = ToHostAddr>(ToGuestAddr(new_value)); %[4]s for (std::size_t index = 0; index < *size; ++index) { holder.data_[index] = { %[6]s }; } return holder.data_; } `, name, strings.Join(append([]string{holder}, // Note %s after * is to intentionally trigger error if/when arrays to arrays would occur in the output data structures. // TODO: read the documentation and fix the code if that would ever happen. makeHolderList("GuestType<%s>::HostHolder*%s %s_holder_ = nullptr;", false, typе, host_arch, guest_arch)...), "\n "), strings.Join(append( makeHolderList("delete[] %[3]s_holder_;", false, typе, host_arch, guest_arch), "delete[] data_;"), "\n "), strings.Join(append([]string{"if (ToGuestAddr(new_value) == kNullGuestAddr) {\n return nullptr;\n }\n holder.size_ = size;\n if ((holder.data_ = new (std::nothrow) " + name + "[*size]) == nullptr) {\n out_of_memory = true;\n return nullptr;\n }"}, makeHolderList("if ((holder.%[3]s_holder_ = new (std::nothrow) GuestType<%[1]s>::HostHolder%[2]s[*size]) == nullptr) {\n out_of_memory = true;\n return nullptr;\n };", false, typе, host_arch, guest_arch)...), "\n "), strings.Join(makeGuestInitializerListForArray(initializeDataMembers, name, typе, host_arch, guest_arch), "\n "), strings.Join(makeHostInitializerListForArray(initializePointers, name, typе, host_arch, guest_arch), "\n ")) } if err != nil { return err } } if conversion[name].need_const_convertor { if name == "VkAccelerationStructureBuildGeometryInfoKHR" { if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType::GuestHolder { public: friend GuestType; private: }; inline GuestType::GuestType( const VkAccelerationStructureBuildGeometryInfoKHR* new_value, GuestType::GuestHolder&, bool&) : value_(bit_cast(new_value)) { } class GuestType::HostHolder { public: friend const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, bool& out_of_memory); private: }; inline const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostHolder&, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `) } else { _, err = fmt.Fprintf(w, `class GuestType::GuestHolder { public: friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); GuestType::GuestArrayHolder pGeometries_holder_; std::unique_ptr extensions_; GuestType data_; }; inline GuestType::GuestType( const VkAccelerationStructureBuildGeometryInfoKHR* new_value, GuestType::GuestHolder& holder, bool& out_of_memory) : value_(new_value == nullptr ? 
kNullGuestAddr : ToGuestAddr(&holder.data_)) { if (new_value != nullptr) { holder.data_ = { .sType = GuestType(new_value->sType), .pNext = ConvertOptionalStructures(new_value->pNext, holder.extensions_, out_of_memory), .type = GuestType(new_value->type), .flags = GuestType(new_value->flags), .mode = GuestType(new_value->mode), .srcAccelerationStructure = GuestType(new_value->srcAccelerationStructure), .dstAccelerationStructure = GuestType(new_value->dstAccelerationStructure), .geometryCount = GuestType(new_value->geometryCount), .pGeometries = GuestType(new_value->pGeometries, new_value->ppGeometries, holder.pGeometries_holder_, new_value->geometryCount, out_of_memory), .ppGeometries = GuestType(nullptr)}; } } class GuestType::HostHolder { public: friend const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); GuestType::HostArrayHolder pGeometries_holder_; std::unique_ptr extensions_; VkAccelerationStructureBuildGeometryInfoKHR data_; }; inline const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } holder.data_ = { .sType = VkStructureType( ToHostAddr>(ToGuestAddr(new_value)) ->sType), .pNext = ConvertOptionalStructures( ToHostAddr>(ToGuestAddr(new_value)) ->pNext, holder.extensions_, out_of_memory), .type = VkAccelerationStructureTypeKHR( ToHostAddr>(ToGuestAddr(new_value)) ->type), .flags = VkBuildAccelerationStructureFlagsKHR( ToHostAddr>(ToGuestAddr(new_value)) ->flags), .mode = VkBuildAccelerationStructureModeKHR( ToHostAddr>(ToGuestAddr(new_value)) ->mode), .srcAccelerationStructure = VkAccelerationStructureKHR( ToHostAddr>(ToGuestAddr(new_value)) ->srcAccelerationStructure), .dstAccelerationStructure = VkAccelerationStructureKHR( ToHostAddr>(ToGuestAddr(new_value)) ->dstAccelerationStructure), .geometryCount = std::uint32_t( ToHostAddr>(ToGuestAddr(new_value)) ->geometryCount), .pGeometries = ToHostType( ToHostAddr>(ToGuestAddr(new_value)) ->pGeometries, ToHostAddr>(ToGuestAddr(new_value)) ->ppGeometries, holder.pGeometries_holder_, ToHostAddr>(ToGuestAddr(new_value)) ->geometryCount, out_of_memory), .ppGeometries = nullptr}; return &holder.data_; } `) } if err != nil { return err } } else { if typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { _, err = fmt.Fprintf(w, `class GuestType::GuestHolder: GuestHolderBase { public: virtual ~GuestHolder() override = default; `, name) } else { _, err = fmt.Fprintf(w, `class GuestType::GuestHolder { public: `, name) } if err != nil { return err } if name == "VkDescriptorGetInfoEXT" { if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend GuestType; private: }; inline GuestType::GuestType( const VkDescriptorGetInfoEXT* new_value, GuestType::GuestHolder&, bool&) : value_(bit_cast(new_value)) {} #endif `) } else { _, err = fmt.Fprintf(w, ` friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); #if defined(__i386__) GuestType::GuestHolder VkDescriptorImageInfo_holder_; #endif 
GuestType::GuestHolder VkDescriptorAddressInfoEXT_holder_; std::unique_ptr extensions_; GuestType data_; }; inline GuestType::GuestType( const VkDescriptorGetInfoEXT* new_value, GuestType::GuestHolder& holder, [[maybe_unused]] bool& out_of_memory) : value_(new_value == nullptr ? kNullGuestAddr : ToGuestAddr(&holder.data_)) { if (new_value != nullptr) { holder.data_ = { .sType = GuestType(new_value->sType), .pNext = ConvertOptionalStructures(new_value->pNext, holder.extensions_, out_of_memory), .type = GuestType(new_value->type), .data = new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLER ? GuestType{.uniоn = {.pSampler = GuestType( new_value->data.pSampler)}} : new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER || new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT || new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_STORAGE_IMAGE #if defined(__i386__) ? GuestType{.uniоn = {.pSampledImage = GuestType( new_value->data.pSampledImage, holder.VkDescriptorImageInfo_holder_, out_of_memory)}} #else ? GuestType{ .uniоn = {.pSampledImage = bit_cast>( GuestType( new_value->data.pSampledImage))}} #endif : new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR || new_value->type == BERBERIS_VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV ? GuestType{.uniоn = {.accelerationStructure = GuestType( new_value->data .accelerationStructure)}} : GuestType{ .uniоn = {.pUniformBuffer = GuestType( new_value->data.pUniformBuffer, holder.VkDescriptorAddressInfoEXT_holder_, out_of_memory)}}}; } } `) } } else { holder := "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += "GuestType<" + name + "> data_;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend GuestType; private: }; inline GuestType::GuestType(const %[1]s* new_value, GuestType::GuestHolder&, bool&) : value_(bit_cast(new_value)) { } `, name) } else { _, err = fmt.Fprintf(w, ` friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); %[2]s }; inline GuestType::GuestType(const %[1]s* new_value, GuestType::GuestHolder& holder, [[maybe_unused]] bool& out_of_memory) : value_(new_value == nullptr ? 
kNullGuestAddr : ToGuestAddr(&holder.data_)) { if (new_value != nullptr) { holder.data_ = { %[3]s }; } } `, name, strings.Join(append( makeHolderList("GuestType<%s>::Guest%sHolder %s_holder_;", true, typе, host_arch, guest_arch), holder), "\n "), strings.Join(makeGuestInitializerList(initializeConstStruct, name, typе, host_arch, guest_arch), "\n ")) } } if err != nil { return err } if typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { _, err = fmt.Fprintf(w, `class GuestType::HostHolder: public HostHolderBase { public: virtual ~HostHolder() override = default; `, name) } else { _, err = fmt.Fprintf(w, `class GuestType::HostHolder { public: `, name) } if err != nil { return err } if name == "VkDescriptorGetInfoEXT" { if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend const VkDescriptorGetInfoEXT* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, bool& out_of_memory); private: }; inline const VkDescriptorGetInfoEXT* ToHostType( const GuestType& new_value, GuestType::HostHolder&, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `) } else { _, err = fmt.Fprintf(w, ` friend const VkDescriptorGetInfoEXT* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); #if defined(__i386__) GuestType::HostHolder VkDescriptorImageInfo_holder_; #endif GuestType::HostHolder VkDescriptorAddressInfoEXT_holder_; std::unique_ptr extensions_; VkDescriptorGetInfoEXT data_; }; inline const VkDescriptorGetInfoEXT* ToHostType( const GuestType& new_value, GuestType::HostHolder& holder, [[maybe_unused]] bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } holder.data_ = { .sType = VkStructureType( ToHostAddr>(ToGuestAddr(new_value))->sType), .pNext = ConvertOptionalStructures( ToHostAddr>(ToGuestAddr(new_value))->pNext, holder.extensions_, out_of_memory), .type = VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value))->type), .data = VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value))->type) == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLER ? VkDescriptorDataEXT{.pSampler = ToHostAddr( ToGuestAddr(ToHostAddr>( ToGuestAddr(new_value)) ->data.uniоn.pSampler))} : VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value))->type) == BERBERIS_VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER || VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value)) ->type) == BERBERIS_VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT || VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value)) ->type) == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value)) ->type) == BERBERIS_VK_DESCRIPTOR_TYPE_STORAGE_IMAGE #if defined(__i386__) ? VkDescriptorDataEXT{.pSampledImage = ToHostType(ToHostAddr>( ToGuestAddr(new_value)) ->data.uniоn.pSampledImage, holder.VkDescriptorImageInfo_holder_, out_of_memory)} #else ? VkDescriptorDataEXT{ .pSampledImage = bit_cast( ToHostAddr>( ToGuestAddr(new_value))->data.uniоn.pSampledImage)} #endif : VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value))->type) == BERBERIS_VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR || VkDescriptorType( ToHostAddr>(ToGuestAddr(new_value)) ->type) == BERBERIS_VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV ? 
VkDescriptorDataEXT{.accelerationStructure = VkDeviceAddress( ToHostAddr>( ToGuestAddr(new_value)) ->data.uniоn.accelerationStructure)} : VkDescriptorDataEXT{ .pUniformBuffer = ToHostType( ToHostAddr>(ToGuestAddr(new_value)) ->data.uniоn.pUniformBuffer, holder.VkDescriptorAddressInfoEXT_holder_, out_of_memory)}}; return &holder.data_; } `) } } else { holder := "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += name + " data_;" extra_holder_argument := "" if name == "VkCommandBufferBeginInfo" { extra_holder_argument = "[[maybe_unused]] bool has_inheritance_info, " } if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, ` friend const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostHolder& holder, %[2]sbool& out_of_memory); private: }; inline const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostHolder&, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `, name, extra_holder_argument) } else { _, err = fmt.Fprintf(w, ` friend const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostHolder& holder, %[4]sbool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); %[2]s }; inline const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostHolder& holder, %[4]s[[maybe_unused]] bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } holder.data_ = { %[3]s }; return &holder.data_; } `, name, strings.Join(append( makeHolderList("GuestType<%s>::Host%sHolder %s_holder_;", true, typе, host_arch, guest_arch), holder), "\n "), strings.Join(makeHostInitializerList(initializeConstStruct, name, typе, host_arch, guest_arch), "\n "), extra_holder_argument) } } if err != nil { return err } } } if conversion[name].need_const_array_convertor { if name == "VkAccelerationStructureGeometryKHR" { if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: }; inline GuestType::GuestArrayHolder::~GuestArrayHolder() { } inline GuestType::GuestType( const VkAccelerationStructureGeometryKHR* const new_value, const VkAccelerationStructureGeometryKHR* const* const, GuestType::GuestArrayHolder&, std::size_t, bool&) : value_(bit_cast(new_value)) { } class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const VkAccelerationStructureGeometryKHR* ToHostType( const GuestType& new_value, GuestType const new_value_ptr, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: }; inline GuestType::HostArrayHolder::~HostArrayHolder() { } inline const VkAccelerationStructureGeometryKHR* ToHostType( const GuestType& new_value, GuestType const, GuestType::HostArrayHolder&, std::size_t, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `) } else { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; const VkAccelerationStructureGeometryKHR* origin_ = nullptr; std::unique_ptr extensions_; GuestType* data_ = nullptr; }; inline GuestType::GuestArrayHolder::~GuestArrayHolder() { delete[] 
data_; } inline GuestType::GuestType( const VkAccelerationStructureGeometryKHR* const new_value, const VkAccelerationStructureGeometryKHR* const* const new_value_ptr, GuestType::GuestArrayHolder& holder, std::size_t size, [[maybe_unused]] bool& out_of_memory) : value_((new_value == nullptr && new_value_ptr == nullptr) ? kNullGuestAddr : ToGuestAddr(holder.data_ = new (std::nothrow) GuestType[size])) { if ((new_value != nullptr || new_value_ptr != nullptr) && value_ == kNullGuestAddr) { out_of_memory = true; return; } holder.size_ = size; holder.origin_ = new_value; if (new_value != nullptr) { for (std::size_t index = 0; index < size; ++index) { ToHostAddr>(value_)[index] = { .sType = GuestType(new_value[index].sType), .pNext = ConvertOptionalStructures(new_value[index].pNext, holder.extensions_, out_of_memory), .geometryType = GuestType(new_value[index].geometryType), .flags = GuestType(new_value[index].flags)}; } } else { for (std::size_t index = 0; index < size; ++index) { ToHostAddr>(value_)[index] = { .sType = GuestType(new_value_ptr[index]->sType), .pNext = ConvertOptionalStructures(new_value_ptr[index]->pNext, holder.extensions_, out_of_memory), .geometryType = GuestType(new_value_ptr[index]->geometryType), .flags = GuestType(new_value_ptr[index]->flags)}; } } } class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const VkAccelerationStructureGeometryKHR* ToHostType( const GuestType& new_value, GuestType const new_value_ptr, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; std::unique_ptr extensions_; VkAccelerationStructureGeometryKHR* data_ = nullptr; }; inline GuestType::HostArrayHolder::~HostArrayHolder() { delete[] data_; } inline const VkAccelerationStructureGeometryKHR* ToHostType( const GuestType& new_value, GuestType const new_value_ptr, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr && ToGuestAddr(new_value_ptr) == kNullGuestAddr) { return nullptr; } if ((holder.data_ = new (std::nothrow) VkAccelerationStructureGeometryKHR[size]) == nullptr) { out_of_memory = true; return nullptr; } holder.size_ = size; if (ToGuestAddr(new_value) != 0) { for (std::size_t index = 0; index < size; ++index) { holder.data_[index] = { .sType = VkStructureType(ToHostAddr>( ToGuestAddr(new_value))[index] .sType), .pNext = ConvertOptionalStructures(ToHostAddr>( ToGuestAddr(new_value))[index] .pNext, holder.extensions_, out_of_memory), .geometryType = VkGeometryTypeKHR(ToHostAddr>( ToGuestAddr(new_value))[index] .geometryType), .flags = VkGeometryFlagsKHR(ToHostAddr>( ToGuestAddr(new_value))[index] .flags)}; } } else { for (std::size_t index = 0; index < size; ++index) { holder.data_[index] = { .sType = VkStructureType(ToHostAddr*>( ToGuestAddr(new_value_ptr))[index] ->sType), .pNext = ConvertOptionalStructures(ToHostAddr*>( ToGuestAddr(new_value))[index] ->pNext, holder.extensions_, out_of_memory), .geometryType = VkGeometryTypeKHR(ToHostAddr*>( ToGuestAddr(new_value))[index] ->geometryType), .flags = VkGeometryFlagsKHR(ToHostAddr*>( ToGuestAddr(new_value))[index] ->flags)}; } } return holder.data_; } `) } if err != nil { return err } } else if name == "VkAccelerationStructureBuildGeometryInfoKHR" { if isInputCompatible(typе, host_arch, 
guest_arch) { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: }; inline GuestType< const VkAccelerationStructureBuildGeometryInfoKHR*>::GuestArrayHolder::~GuestArrayHolder() { } inline GuestType::GuestType( const VkAccelerationStructureBuildGeometryInfoKHR* const new_value, GuestType::GuestArrayHolder&, std::size_t, bool&) : value_(bit_cast(new_value)) { } class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: }; inline GuestType< const VkAccelerationStructureBuildGeometryInfoKHR*>::HostArrayHolder::~HostArrayHolder() { } inline const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostArrayHolder&, std::size_t, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `) } else { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; std::unique_ptr extensions_; GuestType* data_ = nullptr; GuestType::GuestArrayHolder* pGeometries_holder_ = nullptr; }; inline GuestType< const VkAccelerationStructureBuildGeometryInfoKHR*>::GuestArrayHolder::~GuestArrayHolder() { delete[] pGeometries_holder_; delete[] data_; } inline GuestType::GuestType( const VkAccelerationStructureBuildGeometryInfoKHR* const new_value, GuestType::GuestArrayHolder& holder, std::size_t size, [[maybe_unused]] bool& out_of_memory) : value_(new_value == nullptr ? 
kNullGuestAddr : ToGuestAddr( holder.data_ = new (std::nothrow) GuestType[size])) { if (new_value == nullptr) { holder.pGeometries_holder_ = new (std::nothrow) GuestType::GuestArrayHolder[size]; } if (new_value != nullptr && (value_ == kNullGuestAddr || holder.pGeometries_holder_ == nullptr)) { out_of_memory = true; return; } holder.size_ = size; for (std::size_t index = 0; index < size; ++index) { ToHostAddr>(value_)[index] = { .sType = GuestType(new_value[index].sType), .pNext = ConvertOptionalStructures(new_value[index].pNext, holder.extensions_, out_of_memory), .type = GuestType(new_value[index].type), .flags = GuestType(new_value[index].flags), .mode = GuestType(new_value[index].mode), .srcAccelerationStructure = GuestType(new_value[index].srcAccelerationStructure), .dstAccelerationStructure = GuestType(new_value[index].dstAccelerationStructure), .geometryCount = GuestType(new_value[index].geometryCount), .pGeometries = GuestType( new_value[index].pGeometries, new_value[index].ppGeometries, holder.pGeometries_holder_[index], new_value[index].geometryCount, out_of_memory), .ppGeometries = GuestType(nullptr)}; } } class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; std::unique_ptr extensions_; GuestType* origin_ = nullptr; VkAccelerationStructureBuildGeometryInfoKHR* data_ = nullptr; GuestType::HostArrayHolder pGeometries_holder_; }; inline GuestType< const VkAccelerationStructureBuildGeometryInfoKHR*>::HostArrayHolder::~HostArrayHolder() { delete[] data_; } inline const VkAccelerationStructureBuildGeometryInfoKHR* ToHostType( const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } if ((holder.data_ = new (std::nothrow) VkAccelerationStructureBuildGeometryInfoKHR[size]) == nullptr) { out_of_memory = true; return nullptr; } holder.size_ = size; holder.origin_ = ToHostAddr>( ToGuestAddr(new_value)); for (std::size_t index = 0; index < size; ++index) { holder.data_[index] = { .sType = VkStructureType(ToHostAddr>( ToGuestAddr(new_value))[index] .sType), .pNext = ConvertOptionalStructures(ToHostAddr>( ToGuestAddr(new_value))[index] .pNext, holder.extensions_, out_of_memory), .type = VkAccelerationStructureTypeKHR( ToHostAddr>( ToGuestAddr(new_value))[index] .type), .flags = VkBuildAccelerationStructureFlagsKHR( ToHostAddr>( ToGuestAddr(new_value))[index] .flags), .mode = VkBuildAccelerationStructureModeKHR( ToHostAddr>( ToGuestAddr(new_value))[index] .mode), .srcAccelerationStructure = VkAccelerationStructureKHR( ToHostAddr>( ToGuestAddr(new_value))[index] .srcAccelerationStructure), .dstAccelerationStructure = VkAccelerationStructureKHR( ToHostAddr>( ToGuestAddr(new_value))[index] .dstAccelerationStructure), .geometryCount = std::uint32_t(ToHostAddr>( ToGuestAddr(new_value))[index] .geometryCount), .pGeometries = ToHostType(ToHostAddr>( ToGuestAddr(new_value))[index] .pGeometries, ToHostAddr>( ToGuestAddr(new_value))[index] .ppGeometries, holder.pGeometries_holder_, ToHostAddr>( ToGuestAddr(new_value))[index] .geometryCount, out_of_memory), .ppGeometries = 
nullptr}; } return holder.data_; } `) } if err != nil { return err } } else if name == "VkGraphicsPipelineCreateInfo" { // Some fields can be invalid depending on whether rasterization is enabled. // We cannot touch/convert them automatically. Thus manual implementation. // All modifications compared to autogenerated version are marked with MOD START/END. _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; std::unique_ptr extensions_; GuestType* data_ = nullptr; GuestType::GuestArrayHolder* pStages_holder_ = nullptr; GuestType::GuestHolder* pVertexInputState_holder_ = nullptr; GuestType::GuestHolder* pInputAssemblyState_holder_ = nullptr; GuestType::GuestHolder* pTessellationState_holder_ = nullptr; GuestType::GuestHolder* pViewportState_holder_ = nullptr; GuestType::GuestHolder* pRasterizationState_holder_ = nullptr; GuestType::GuestHolder* pMultisampleState_holder_ = nullptr; GuestType::GuestHolder* pDepthStencilState_holder_ = nullptr; GuestType::GuestHolder* pColorBlendState_holder_ = nullptr; GuestType::GuestHolder* pDynamicState_holder_ = nullptr; }; inline GuestType::GuestArrayHolder::~GuestArrayHolder() { delete[] data_; delete[] pStages_holder_; delete[] pVertexInputState_holder_; delete[] pInputAssemblyState_holder_; delete[] pTessellationState_holder_; delete[] pViewportState_holder_; delete[] pRasterizationState_holder_; delete[] pMultisampleState_holder_; delete[] pDepthStencilState_holder_; delete[] pColorBlendState_holder_; delete[] pDynamicState_holder_; } inline GuestType::GuestType(const VkGraphicsPipelineCreateInfo* const new_value, GuestType::GuestArrayHolder& holder, std::size_t size, [[maybe_unused]] bool& out_of_memory) : value_(new_value == nullptr ? 
kNullGuestAddr : ToGuestAddr(holder.data_ = new (std::nothrow) GuestType[size])) { if (new_value != nullptr && value_ == kNullGuestAddr) { out_of_memory = true; return; } holder.size_ = size; if ((holder.pStages_holder_ = new (std::nothrow) GuestType::GuestArrayHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pVertexInputState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pInputAssemblyState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pTessellationState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pViewportState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pRasterizationState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pMultisampleState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pDepthStencilState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pColorBlendState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; if ((holder.pDynamicState_holder_ = new (std::nothrow) GuestType::GuestHolder[size]) == nullptr) { out_of_memory = true; return; }; for (std::size_t index = 0; index < size; ++index) { // MOD START auto* host_rasterization_state = new_value[index].pRasterizationState; bool rasterization_enabled = host_rasterization_state->rasterizerDiscardEnable == BERBERIS_VK_FALSE || // When dynamic state presents the rasterization may be enabled later. new_value[index].pDynamicState; // MOD END ToHostAddr>(value_)[index] = { .sType = GuestType(new_value[index].sType), .pNext = ConvertOptionalStructures(new_value[index].pNext, holder.extensions_, out_of_memory), .flags = GuestType(new_value[index].flags), .stageCount = GuestType(new_value[index].stageCount), .pStages = GuestType(new_value[index].pStages, holder.pStages_holder_[index], new_value[index].stageCount, out_of_memory), .pVertexInputState = GuestType(new_value[index].pVertexInputState, holder.pVertexInputState_holder_[index], out_of_memory), .pInputAssemblyState = GuestType(new_value[index].pInputAssemblyState, holder.pInputAssemblyState_holder_[index], out_of_memory), .pTessellationState = GuestType(new_value[index].pTessellationState, holder.pTessellationState_holder_[index], out_of_memory), // MOD START .pViewportState = rasterization_enabled ? GuestType(new_value[index].pViewportState, holder.pViewportState_holder_[index], out_of_memory) : GuestType(), .pRasterizationState = GuestType(new_value[index].pRasterizationState, holder.pRasterizationState_holder_[index], out_of_memory), .pMultisampleState = rasterization_enabled ? GuestType(new_value[index].pMultisampleState, holder.pMultisampleState_holder_[index], out_of_memory) : GuestType(), .pDepthStencilState = rasterization_enabled ? GuestType(new_value[index].pDepthStencilState, holder.pDepthStencilState_holder_[index], out_of_memory) : GuestType(), .pColorBlendState = rasterization_enabled ? 
GuestType(new_value[index].pColorBlendState, holder.pColorBlendState_holder_[index], out_of_memory) : GuestType(), // MOD END .pDynamicState = GuestType(new_value[index].pDynamicState, holder.pDynamicState_holder_[index], out_of_memory), .layout = GuestType(new_value[index].layout), .renderPass = GuestType(new_value[index].renderPass), .subpass = GuestType(new_value[index].subpass), .basePipelineHandle = GuestType(new_value[index].basePipelineHandle), .basePipelineIndex = GuestType(new_value[index].basePipelineIndex) }; } } class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const VkGraphicsPipelineCreateInfo* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; std::unique_ptr extensions_; VkGraphicsPipelineCreateInfo* data_ = nullptr; GuestType::HostArrayHolder* pStages_holder_ = nullptr; GuestType::HostHolder* pVertexInputState_holder_ = nullptr; GuestType::HostHolder* pInputAssemblyState_holder_ = nullptr; GuestType::HostHolder* pTessellationState_holder_ = nullptr; GuestType::HostHolder* pViewportState_holder_ = nullptr; GuestType::HostHolder* pRasterizationState_holder_ = nullptr; GuestType::HostHolder* pMultisampleState_holder_ = nullptr; GuestType::HostHolder* pDepthStencilState_holder_ = nullptr; GuestType::HostHolder* pColorBlendState_holder_ = nullptr; GuestType::HostHolder* pDynamicState_holder_ = nullptr; }; inline GuestType::HostArrayHolder::~HostArrayHolder() { delete[] data_; delete[] pStages_holder_; delete[] pVertexInputState_holder_; delete[] pInputAssemblyState_holder_; delete[] pTessellationState_holder_; delete[] pViewportState_holder_; delete[] pRasterizationState_holder_; delete[] pMultisampleState_holder_; delete[] pDepthStencilState_holder_; delete[] pColorBlendState_holder_; delete[] pDynamicState_holder_; } inline const VkGraphicsPipelineCreateInfo* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory) { if (ToGuestAddr(new_value) == kNullGuestAddr) { return nullptr; } if ((holder.data_ = new (std::nothrow) VkGraphicsPipelineCreateInfo[size]) == nullptr) { out_of_memory = true; return nullptr; } if ((holder.pStages_holder_ = new (std::nothrow) GuestType::HostArrayHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pVertexInputState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pInputAssemblyState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pTessellationState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pViewportState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pRasterizationState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pMultisampleState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pDepthStencilState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { 
out_of_memory = true; return nullptr; }; if ((holder.pColorBlendState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; if ((holder.pDynamicState_holder_ = new (std::nothrow) GuestType::HostHolder[size]) == nullptr) { out_of_memory = true; return nullptr; }; holder.size_ = size; for (std::size_t index = 0; index < size; ++index) { // MOD START auto& pipeline_info = ToHostAddr>(ToGuestAddr(new_value))[index]; auto guest_rasterization_state = pipeline_info.pRasterizationState; bool rasterization_enabled = ToHostAddr>(ToGuestAddr(guest_rasterization_state))->rasterizerDiscardEnable == BERBERIS_VK_FALSE // When dynamic state presents the rasterization may be enabled later. || ToGuestAddr(pipeline_info.pDynamicState) != kNullGuestAddr; // MOD END holder.data_[index] = { .sType = VkStructureType(ToHostAddr>(ToGuestAddr(new_value))[index].sType), .pNext = ConvertOptionalStructures(ToHostAddr>(ToGuestAddr(new_value))[index].pNext, holder.extensions_, out_of_memory), .flags = VkPipelineCreateFlags(ToHostAddr>(ToGuestAddr(new_value))[index].flags), .stageCount = std::uint32_t(ToHostAddr>(ToGuestAddr(new_value))[index].stageCount), .pStages = ToHostType(ToHostAddr>(ToGuestAddr(new_value))[index].pStages, holder.pStages_holder_[index], ToHostAddr>(ToGuestAddr(new_value))[index].stageCount, out_of_memory), .pVertexInputState = ToHostType(ToHostAddr>(ToGuestAddr(new_value))[index].pVertexInputState, holder.pVertexInputState_holder_[index], out_of_memory), .pInputAssemblyState = ToHostType(ToHostAddr>(ToGuestAddr(new_value))[index].pInputAssemblyState, holder.pInputAssemblyState_holder_[index], out_of_memory), .pTessellationState = ToHostType(ToHostAddr>(ToGuestAddr(new_value))[index].pTessellationState, holder.pTessellationState_holder_[index], out_of_memory), // MOD START .pViewportState = rasterization_enabled ? ToHostType(pipeline_info.pViewportState, holder.pViewportState_holder_[index], out_of_memory) : nullptr, .pRasterizationState = ToHostType(pipeline_info.pRasterizationState, holder.pRasterizationState_holder_[index], out_of_memory), .pMultisampleState = rasterization_enabled ? ToHostType(pipeline_info.pMultisampleState, holder.pMultisampleState_holder_[index], out_of_memory) : nullptr, .pDepthStencilState = rasterization_enabled ? ToHostType(pipeline_info.pDepthStencilState, holder.pDepthStencilState_holder_[index], out_of_memory) : nullptr, .pColorBlendState = rasterization_enabled ? 
ToHostType(pipeline_info.pColorBlendState, holder.pColorBlendState_holder_[index], out_of_memory) : nullptr, // MOD END .pDynamicState = ToHostType(ToHostAddr>(ToGuestAddr(new_value))[index].pDynamicState, holder.pDynamicState_holder_[index], out_of_memory), .layout = VkPipelineLayout(ToHostAddr>(ToGuestAddr(new_value))[index].layout), .renderPass = VkRenderPass(ToHostAddr>(ToGuestAddr(new_value))[index].renderPass), .subpass = std::uint32_t(ToHostAddr>(ToGuestAddr(new_value))[index].subpass), .basePipelineHandle = VkPipeline(ToHostAddr>(ToGuestAddr(new_value))[index].basePipelineHandle), .basePipelineIndex = std::int32_t(ToHostAddr>(ToGuestAddr(new_value))[index].basePipelineIndex) }; } return holder.data_; } `) if err != nil { return err } } else { holder := "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += "GuestType* data_ = nullptr;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: }; inline GuestType::GuestArrayHolder::~GuestArrayHolder() { } inline GuestType::GuestType(const %[1]s* const new_value, GuestType::GuestArrayHolder&, std::size_t, bool&) : value_(bit_cast(new_value)) { } `, name) } else { _, err = fmt.Fprintf(w, `class GuestType::GuestArrayHolder { public: ~GuestArrayHolder(); friend GuestType; private: friend GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory); friend GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; %[2]s }; inline GuestType::GuestArrayHolder::~GuestArrayHolder() { %[3]s } // Apparently "size == 0" is used for pointers marked as noautovalidity="true" in vk.xml and "new_value == nullptr" is used for pointers marked with optional="true", but it's not clear how consistent is it. // // Better to check both options in all cases, it's not too slow in practice. // inline GuestType::GuestType(const %[1]s* const new_value, GuestType::GuestArrayHolder& holder, std::size_t size, [[maybe_unused]] bool& out_of_memory) : value_((size == 0 || new_value == nullptr) ? 
kNullGuestAddr : ToGuestAddr(holder.data_ = new (std::nothrow) GuestType[size])) { %[4]s for (std::size_t index = 0; index < size; ++index) { ToHostAddr>(value_)[index] = { %[5]s }; } } `, name, strings.Join(append([]string{holder}, makeHolderList("GuestType<%s>::Guest%sHolder* %s_holder_ = nullptr;", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(append([]string{"delete[] data_;"}, makeHolderList("delete[] %[3]s_holder_;", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(append([]string{"if (value_ == kNullGuestAddr) {\n if (size != 0 && new_value != nullptr) {\n out_of_memory = true;\n }\n return;\n }\n holder.size_ = size;"}, makeHolderList("if ((holder.%[3]s_holder_ = new (std::nothrow) GuestType<%[1]s>::Guest%[2]sHolder[size]) == nullptr) {\n out_of_memory = true;\n return;\n };", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(makeGuestInitializerListForArray(initializeConstStruct, name, typе, host_arch, guest_arch), "\n ")) } if err != nil { return err } holder = "" if extensible_type { holder = "std::unique_ptr extensions_;\n " } holder += name + "* data_ = nullptr;" if isInputCompatible(typе, host_arch, guest_arch) { _, err = fmt.Fprintf(w, `class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: }; inline GuestType::HostArrayHolder::~HostArrayHolder() { } inline const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder&, std::size_t, bool&) { return ToHostAddr(ToGuestAddr(new_value)); } `, name) } else { _, err = fmt.Fprintf(w, `class GuestType::HostArrayHolder { public: ~HostArrayHolder(); friend const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory); private: friend void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); friend const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory); std::size_t size_ = 0; %[2]s }; inline GuestType::HostArrayHolder::~HostArrayHolder() { %[3]s } inline const %[1]s* ToHostType(const GuestType& new_value, GuestType::HostArrayHolder& holder, std::size_t size, bool& out_of_memory) { %[4]s for (std::size_t index = 0; index < size; ++index) { holder.data_[index] = { %[5]s }; } return holder.data_; } `, name, strings.Join(append([]string{holder}, makeHolderList("GuestType<%s>::Host%sHolder* %s_holder_ = nullptr;", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(append([]string{"delete[] data_;"}, makeHolderList("delete[] %[3]s_holder_;", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(append([]string{"if (size == 0 || ToGuestAddr(new_value) == kNullGuestAddr) {\n return nullptr;\n }\n holder.size_ = size;\n if ((holder.data_ = new (std::nothrow) " + name + "[size]) == nullptr) {\n out_of_memory = true;\n return nullptr;\n }"}, makeHolderList("if ((holder.%[3]s_holder_ = new (std::nothrow) GuestType<%[1]s>::Host%[2]sHolder[size]) == nullptr) {\n out_of_memory = true;\n return nullptr;\n };", true, typе, host_arch, guest_arch)...), "\n "), strings.Join(makeHostInitializerListForArray(initializeConstStruct, name, typе, host_arch, guest_arch), "\n ")) } if err != nil { return err } } } return nil } func makeHolderList(format string, const_types bool, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { holders := []string{} return 
fillHolderList(holders, format, const_types, typе, host_arch, guest_arch) } func fillHolderList(holders []string, format string, const_types bool, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { ids := typе.NumField(cpp_types.FirstArch) for id := uint(0); id < ids; id++ { field_name := typе.Field(id, cpp_types.FirstArch).Name() field_type := typе.Field(id, cpp_types.FirstArch).Type() if !doesNeedHolder(field_type, host_arch, guest_arch) || field_name == "pNext" { continue } switch field_type.Kind(cpp_types.FirstArch) { case cpp_types.Struct, cpp_types.Union: holders = fillHolderList(holders, format, const_types, field_type, host_arch, guest_arch) default: field_length := typе.Field(id, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length() if field_length == nil { holders = append(holders, fmt.Sprintf( format, field_type.Name(host_arch), "", field_name)) } else { holders = append(holders, fmt.Sprintf( format, field_type.Name(host_arch), "Array", field_name)) } } } return holders } // initializeConstStruct - copy all data elements (used for const types) // initializePointers - copy pointers, zero non-pointers (used for non-const types on input) // initializeDataMembers - copy non-pointers, retain pointers (used for non-const types on output) type initializeStructMode uint const ( initializeConstStruct = iota initializePointers initializeDataMembers ) func makeGuestInitializerList(mode initializeStructMode, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { guest_initializer_list := []string{} var prefix string if mode == initializeDataMembers { prefix = "data_." } else { prefix = "new_value->" } return makeInitializerList(guest_initializer_list, mode, true, prefix, "origin_->", "", typе, host_arch, guest_arch) } func makeGuestInitializerListForArray(mode initializeStructMode, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { guest_initializer_list := []string{} var prefix string if mode == initializeDataMembers { prefix = "data_[index]." } else { prefix = "new_value[index]." } return makeInitializerList(guest_initializer_list, mode, true, prefix, "origin_[index].", "[index]", typе, host_arch, guest_arch) } func makeHostInitializerList(mode initializeStructMode, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { host_initializer_list := []string{} var prefix string if mode == initializeDataMembers { prefix = "data_." } else { prefix = fmt.Sprintf("ToHostAddr>(ToGuestAddr(new_value))->", name) } return makeInitializerList(host_initializer_list, mode, false, prefix, "origin_->", "", typе, host_arch, guest_arch) } func makeHostInitializerListForArray(mode initializeStructMode, name string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { host_initializer_list := []string{} var prefix string if mode == initializeDataMembers { prefix = "data_[index]." 
} else { prefix = fmt.Sprintf("ToHostAddr>(ToGuestAddr(new_value))[index].", name) } return makeInitializerList(host_initializer_list, mode, false, prefix, "origin_[index].", "[index]", typе, host_arch, guest_arch) } func makeInitializerList(initializer_list []string, mode initializeStructMode, convert_to_guest bool, prefix, origin, index string, typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) []string { ids := typе.NumField(cpp_types.FirstArch) if mode == initializePointers { // We must ensure that last element produces output to properly generate "comma" variable below. for ids > 0 { if !isPtr(typе.Field(ids-1, cpp_types.FirstArch).Type()) { ids-- } else { break } } } if ids == 0 { return initializer_list } for id := uint(0); id < ids; id++ { field_name := typе.Field(id, cpp_types.FirstArch).Name() field_type := typе.Field(id, cpp_types.FirstArch).Type() if mode == initializePointers { if !isPtr(field_type) && field_name != "sType" { continue } } else if initializeConstStruct == initializeConstStruct { field_type = cpp_types.ConstType(field_type) } field_type_name := targetTypeName(field_type, convert_to_guest) comma := "," if id == ids-1 { comma = "" } initializer := fmt.Sprintf( ".%[1]s = %[2]s(%[3]s%[1]s)%[4]s", field_name, field_type_name, prefix, comma) field_kind := field_type.Kind(cpp_types.FirstArch) if field_kind == cpp_types.Const { field_type = field_type.Elem(cpp_types.FirstArch) field_kind = field_type.Kind(cpp_types.FirstArch) } switch field_kind { case cpp_types.Array: elem := field_type.Elem(cpp_types.FirstArch) field_type_name = targetTypeName(elem, convert_to_guest) initializer_list = append(initializer_list, fmt.Sprintf(".%s = {", field_name)) max_index := field_type.NumField(cpp_types.FirstArch) - 1 if max_index > 0 { for cur_index := uint(0); cur_index < max_index; cur_index++ { if isStruct(elem) && !isInputCompatible(field_type, host_arch, guest_arch) { initializer_list = append(initializer_list, field_type_name+"{") initializer_list = makeInitializerList( initializer_list, mode, convert_to_guest, fmt.Sprintf("%s%s[%d].", prefix, field_name, cur_index), fmt.Sprintf("%s%s[%d].", origin, field_name, cur_index), fmt.Sprintf("[%d]", origin, field_name, cur_index), elem, host_arch, guest_arch) initializer_list = append(initializer_list, "},") continue } initializer_list = append(initializer_list, fmt.Sprintf( " %[2]s%[1]s[%[3]d],", field_name, prefix, cur_index)) } } if isStruct(elem) && !isInputCompatible(field_type, host_arch, guest_arch) { initializer_list = append(initializer_list, field_type_name+"{") initializer_list = makeInitializerList( initializer_list, mode, convert_to_guest, fmt.Sprintf("%s%s[%d].", prefix, field_name, max_index), fmt.Sprintf("%s%s[%d].", origin, field_name, max_index), fmt.Sprintf("[%d]", origin, field_name, max_index), elem, host_arch, guest_arch) initializer_list = append(initializer_list, "}", "}"+comma) } else { initializer_list = append(initializer_list, fmt.Sprintf( " %[2]s%[1]s[%[3]d]}%[4]s", field_name, prefix, max_index, comma)) } continue case cpp_types.Ptr: initializer_fmt := "" if mode == initializeDataMembers { // When we are copying data members we have to keep pointers unchanged. initializer_fmt = ".%[1]s = %[6]s%[1]s%[8]s" // Most Vulkan data structures use "void*" or "const void*" types for the pNext field. // Only two data structures use something different: VkBaseInStructure/VkBaseOutStructure. // Handle them separately. 
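			// For the common "void*"/"const void*" case the generated initializer looks like
			// (taken from the VkAccelerationStructureBuildGeometryInfoKHR converter above):
			//   .pNext = ConvertOptionalStructures(new_value->pNext, holder.extensions_, out_of_memory),
			// i.e. the whole pNext extension chain is converted recursively and the converted
			// copies are owned by holder.extensions_.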
} else if field_name == "pNext" { if field_type.Name(cpp_types.FirstArch) != "void *" && field_type.Name(cpp_types.FirstArch) != "const void *" { panic("Unsupported type of \"pNext\" field") } if mode == initializeConstStruct && !isPtrToConst(field_type) { if convert_to_guest { initializer_fmt = ".%[1]s = ConstCast>(ConvertOptionalStructures(static_cast(%[5]s%[1]s), holder.extensions_, out_of_memory))%[8]s" } else { initializer_fmt = ".%[1]s = const_cast(ConvertOptionalStructures(ConstCast>(%[5]s%[1]s), holder.extensions_, out_of_memory))%[8]s" } } else { if convert_to_guest { initializer_fmt = ".%[1]s = ConvertOptionalStructures(%[5]s%[1]s, holder.extensions_, out_of_memory)%[8]s" } else { initializer_fmt = ".%[1]s = ConvertOptionalStructures(%[5]s%[1]s, holder.extensions_, out_of_memory)%[8]s" } } } else if doesNeedHolder(field_type, host_arch, guest_arch) { // VkCommandBufferBeginInfo has field pInheritanceInfo which may or may not be used and it's quite hard to determine if it can be touched. // Because it's part of Vulkan API and can not be changed we are doing complex dance with maps and lock on the input side. // When layers are involved, though, we assume that nullptr that we are introducing here would pass to other layers thus there is // no need to deal with this problem in the GuestRunners. if !convert_to_guest && field_name == "pInheritanceInfo" && typе.Name(cpp_types.FirstArch) == "struct VkCommandBufferBeginInfo" { initializer_fmt = ".%[1]s = has_inheritance_info ? ToHostType(%[5]s%[1]s, holder.%[1]s_holder_%[7]s%[4]s, out_of_memory) : nullptr%[8]s" // pImageInfo is marked with noautovalidity="true" and there's the following comment: // Sampler, image view, and layout for SAMPLER, COMBINED_IMAGE_SAMPLER, {SAMPLED,STORAGE}_IMAGE, and INPUT_ATTACHMENT descriptor types. // Replace it with nullptr if descriptor type is not in the five listed ones. } else if !convert_to_guest && field_name == "pImageInfo" && typе.Name(cpp_types.FirstArch) == "struct VkWriteDescriptorSet" { initializer_fmt = `.%[1]s = (VkDescriptorType(ToHostAddr>(ToGuestAddr(new_value))[index].descriptorType) == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLER || VkDescriptorType(ToHostAddr>(ToGuestAddr(new_value))[index].descriptorType) == BERBERIS_VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER || VkDescriptorType(ToHostAddr>(ToGuestAddr(new_value))[index].descriptorType) == BERBERIS_VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || VkDescriptorType(ToHostAddr>(ToGuestAddr(new_value))[index].descriptorType) == BERBERIS_VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT || VkDescriptorType(ToHostAddr>(ToGuestAddr(new_value))[index].descriptorType) == BERBERIS_VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ? 
ToHostType(%[5]s%[1]s, holder.%[1]s_holder_%[7]s%[4]s, out_of_memory): nullptr%[8]s` } else { if convert_to_guest { initializer_fmt = ".%[1]s = GuestType<%[2]s>(%[5]s%[1]s, holder.%[1]s_holder_%[7]s%[4]s, out_of_memory)%[8]s" } else { initializer_fmt = ".%[1]s = ToHostType(%[5]s%[1]s, holder.%[1]s_holder_%[7]s%[4]s, out_of_memory)%[8]s" } } } else if isPtrToFunc(field_type) { if convert_to_guest { initializer_fmt = ".%[1]s = WrapHostFunctionIfNeeded(%[5]s%[1]s, \"%[1]s\")%[8]s" } else { initializer_fmt = ".%[1]s = WrapGuestFunctionIfNeeded(GuestType<%[3]s>(%[5]s%[1]s), \"%[1]s\")%[8]s" } } if initializer_fmt != "" { field_length := typе.Field(id, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length() field_length_ref := "" if field_length != nil { if mode == initializeConstStruct && !isPtrToConst(field_type) { if convert_to_guest { field_length_ref = ", &" + prefix + field_length.Name() } else { // After taking address we end up with weird “GuestType*>” which we can only convert to “const ::uint32_t*” using bit_cast field_length_ref = ", bit_cast(&" + prefix + field_length.Name() + ")" } } else { field_length_ref = ", " + prefix + field_length.Name() } } initializer = fmt.Sprintf(initializer_fmt, field_name, field_type.Name(cpp_types.FirstArch), field_type_name, field_length_ref, prefix, origin, index, comma) } case cpp_types.Struct: if !isInputCompatible(field_type, host_arch, guest_arch) { initializer_list = append(initializer_list, fmt.Sprintf( ".%[1]s = %[2]s{", field_name, field_type_name)) initializer_list = makeInitializerList( initializer_list, mode, convert_to_guest, prefix+field_name+".", origin+field_name+".", "", field_type, host_arch, guest_arch) initializer = "}" + comma } case cpp_types.Union: // Temprorary kludge. if field_type.BaseName(cpp_types.FirstArch) == "VkDescriptorDataEXT" { continue } if !isInputCompatible(field_type, cpp_types.X86, cpp_types.Arm) || !isInputCompatible(field_type, cpp_types.X86_64, cpp_types.Arm64) { panic("Unsupported union field in incompatible type: " + typе.Name(cpp_types.FirstArch) + " " + field_name) } } initializer_list = append(initializer_list, initializer) } return initializer_list } func targetTypeName(typе cpp_types.Type, convert_to_guest bool) string { if isConst(typе) { typе = typе.Elem(cpp_types.FirstArch) } type_name := typе.Name(cpp_types.FirstArch) // Outer const just makes conversion harder and doesn't affect anything, really, // since we are only producing temporary object here which would be assigned to // proper const field. if convert_to_guest { type_name = fmt.Sprintf("GuestType<%s>", type_name) } else { switch typе.Kind(cpp_types.FirstArch) { case cpp_types.Struct, cpp_types.Union: type_name = typе.BaseName(cpp_types.FirstArch) case cpp_types.Ptr: if !isPtrToFunc(typе) { type_name = "ToHostAddr" } } } return type_name } func doesNeedHolder(typе cpp_types.Type, host_arch cpp_types.Arch, guest_arch cpp_types.Arch) bool { if isInputCompatible(typе, host_arch, guest_arch) { return false } // We are not trying to convert on ARM64 because we don't know if pointer is valid (and if all extensions are compatible). // On ARM32 we need to convert some data structures, but tests pass because of quirk of how they are run. // TODO(b/274875580): fix properly. if guest_arch == cpp_types.Arm64 && typе.Name(cpp_types.Arm64) == "const struct VkCommandBufferInheritanceInfo *" { return false } kind := typе.Kind(cpp_types.FirstArch) switch kind { // We need holders when we are working with pointers to structures. 
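	// A holder owns the temporary converted copy (the data_ array plus any nested
	// holders), so the pointer handed to the other side stays valid for the duration
	// of the call and is released in the holder's destructor.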
// But pointers to function don't need holders (even if then need coversion routines). case cpp_types.Ptr: pointee := typе.Elem(cpp_types.FirstArch) if isConst(pointee) { pointee = pointee.Elem(cpp_types.FirstArch) } if isStruct(pointee) || isUnion(pointee) { return true } else if isFunc(pointee) { return false } else { panic("Unsupported field in incompatible type: " + typе.Name(cpp_types.FirstArch)) } case cpp_types.Struct, cpp_types.Union: ids := typе.NumField(cpp_types.FirstArch) for id := uint(0); id < ids; id++ { field_type := typе.Field(id, cpp_types.FirstArch).Type() if doesNeedHolder(field_type, host_arch, guest_arch) { return true } } } return false } func printHostStructVerification(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) error { for _, name := range sorted_type_names { typе := types[name] if !isStruct(typе) && !isUnion(typе) { continue } fields_check_berberis_host := []string{} fields_check_platform_host := []string{} for i := uint(0); i < typе.NumField(host_arch); i++ { field := typе.Field(i, host_arch) var field_offset uint if !isUnion(typе) { field_offset = field.(cpp_types.StructFieldInfo).Offset() } else { field_offset = 0 } fields_check_berberis_host = append(fields_check_berberis_host, fmt.Sprintf("CHECK_FIELD_LAYOUT(berberis::%[1]s, %[2]s, %[3]d, %[4]d);", name, field.Name(), field_offset, field.Type().Bits(host_arch))) fields_check_platform_host = append(fields_check_platform_host, fmt.Sprintf("CHECK_FIELD_LAYOUT(::%[1]s, %[2]s, %[3]d, %[4]d);", name, field.Name(), field_offset, field.Type().Bits(host_arch))) } fields_check_berberis_guest := []string{} fields_check_platform_guest := []string{} for i := uint(0); i < typе.NumField(guest_arch); i++ { field := typе.Field(i, guest_arch) var field_offset uint if !isUnion(typе) { field_offset = field.(cpp_types.StructFieldInfo).Offset() } else { field_offset = 0 } fields_check_berberis_guest = append(fields_check_berberis_guest, fmt.Sprintf("CHECK_FIELD_LAYOUT(berberis::%[1]s, %[2]s, %[3]d, %[4]d);", name, field.Name(), field_offset, field.Type().Bits(guest_arch))) fields_check_platform_guest = append(fields_check_platform_guest, fmt.Sprintf("CHECK_FIELD_LAYOUT(::%[1]s, %[2]s, %[3]d, %[4]d);", name, field.Name(), field_offset, field.Type().Bits(guest_arch))) } _, err := fmt.Fprintf( w, `#if %[7]s CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[3]d, %[4]d); %[9]s #if !defined(BERBERIS_%[1]s) CHECK_STRUCT_LAYOUT(::%[2]s, %[3]d, %[4]d); %[10]s #endif /* BERBERIS_%[1]s */ #elif %[8]s CHECK_STRUCT_LAYOUT(berberis::%[2]s, %[5]d, %[6]d); %[11]s #if !defined(BERBERIS_%[1]s) CHECK_STRUCT_LAYOUT(::%[2]s, %[5]d, %[6]d); %[12]s #endif /* BERBERIS_%[1]s */ #else #error Unsupported architecture. 
#endif `, toEnumNameWithSuffix(name, "NOVERIFY"), name, typе.Bits(host_arch), typе.Align(host_arch), typе.Bits(guest_arch), typе.Align(guest_arch), cpp_types.Define(host_arch), cpp_types.Define(guest_arch), strings.Join(fields_check_berberis_host, "\n"), strings.Join(fields_check_platform_host, "\n"), strings.Join(fields_check_berberis_guest, "\n"), strings.Join(fields_check_platform_guest, "\n")) if err != nil { return err } } return nil } func printGuestStructVerification(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) error { for _, name := range sorted_type_names { typе := types[name] if typе.Kind(guest_arch) != cpp_types.Struct && typе.Kind(guest_arch) != cpp_types.Union { continue } if isInputCompatible(typе, host_arch, guest_arch) { continue } for _, cоnst := range []string{"", "const "} { _, err := fmt.Fprintf( w, "CHECK_STRUCT_LAYOUT(berberis::GuestType<%s berberis::%s>, %d, %d);\n", cоnst, name, typе.Bits(guest_arch), typе.Align(guest_arch)) if err != nil { return err } for i := uint(0); i < typе.NumField(cpp_types.FirstArch); i++ { field := typе.Field(i, guest_arch) field_type := field.Type() var field_name string var field_offset uint if typе.Kind(guest_arch) == cpp_types.Struct { field_name = field.Name() field_offset = field.(cpp_types.StructFieldInfo).Offset() } else { field_name = "uniоn." + field.Name() field_offset = 0 } _, err = fmt.Fprintf( w, "CHECK_FIELD_LAYOUT(berberis::GuestType<%s berberis::%s>, %s, %d, %d);\n", cоnst, name, field_name, field_offset, field_type.Bits(guest_arch)) if err != nil { return err } } _, err = fmt.Fprintln(w, "") if err != nil { return err } } } return nil } func printConvertOptionalStructures(w io.Writer, sorted_type_names []string, types map[string]cpp_types.Type, conversion map[string]*NeededConvertor, host_arch, guest_arch cpp_types.Arch) (err error) { var compatible_structs []string var convert_guest_in_structure []string var convert_host_in_structure []string var convert_guest_out_structure []string var convert_host_out_structure []string for _, name := range sorted_type_names { typе := types[name] if isUnion(typе) { continue } if !typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() { continue } optional_value := typе.(vulkan_xml.ExtendedStructInfo).OptionalValue() if cpp_types.IsInputCompatible(typе, host_arch, guest_arch) { compatible_structs = append(compatible_structs, "BERBERIS_"+optional_value) } if conversion[name].need_base_convertor { convert_guest_out_structure = append(convert_guest_out_structure, fmt.Sprintf(` case %[1]s: { auto new_holder = new (std::nothrow) GuestType<%[2]s*>::HostHolder; if (new_holder == nullptr) { out_of_memory = true; return nullptr; } new_holder->extensions_ = std::move(holder); holder.reset(new_holder); auto* converted_type = ToHostType(StaticCast>(StaticCast>(ptr)), *new_holder, out_of_memory); if (out_of_memory) { return nullptr; } return converted_type; } `, "BERBERIS_"+optional_value, name)) convert_host_out_structure = append(convert_host_out_structure, fmt.Sprintf(` case %[1]s: { auto new_holder = new (std::nothrow) GuestType<%[2]s*>::GuestHolder; if (new_holder == nullptr) { out_of_memory = true; return nullptr; } new_holder->extensions_ = std::move(holder); holder.reset(new_holder); auto converted_type = StaticCast>(GuestType<%[2]s*>(static_cast<%[2]s*>(static_cast(ptr)), *new_holder, out_of_memory)); if (out_of_memory) { return nullptr; } return converted_type; } `, "BERBERIS_"+optional_value, name)) } if 
conversion[name].need_const_convertor { convert_guest_in_structure = append(convert_guest_in_structure, fmt.Sprintf(` case %[1]s: { auto new_holder = new (std::nothrow) GuestType::HostHolder; if (new_holder == nullptr) { out_of_memory = true; return nullptr; } new_holder->extensions_ = std::move(holder); holder.reset(new_holder); auto* converted_type = ToHostType(StaticCast>(StaticCast>(ptr)), *new_holder, out_of_memory); if (out_of_memory) { return nullptr; } return converted_type; } `, "BERBERIS_"+optional_value, name)) convert_host_in_structure = append(convert_host_in_structure, fmt.Sprintf(` case %[1]s: { auto new_holder = new (std::nothrow) GuestType::GuestHolder; if (new_holder == nullptr) { out_of_memory = true; return nullptr; } new_holder->extensions_ = std::move(holder); holder.reset(new_holder); auto converted_type = StaticCast>(GuestType(static_cast(static_cast(ptr)), *new_holder, out_of_memory)); if (out_of_memory) { return nullptr; } return converted_type; } `, "BERBERIS_"+optional_value, name)) } } _, err = fmt.Fprintf(w, ` bool AreAllOptionalStructuresCompatible(GuestType head) { for (auto ptr = StaticCast>(head); ToGuestAddr(ptr) != kNullGuestAddr; ptr = ToHostAddr>(ToGuestAddr(ptr))->pNext) { switch (VkStructureType(ToHostAddr>(ToGuestAddr(ptr))->sType)) { case %[1]s: continue; default: return false; } } return true; } bool AreAllOptionalStructuresCompatible(GuestType head) { for (auto ptr = StaticCast>(head); ToGuestAddr(ptr) != kNullGuestAddr; ptr = ToHostAddr>(ToGuestAddr(ptr))->pNext) { switch (VkStructureType(ToHostAddr>(ToGuestAddr(ptr))->sType)) { case %[1]s: continue; default: return false; } } return true; } bool AreAllOptionalStructuresCompatible(const void* head) { for (auto* ptr = static_cast(head); ptr != nullptr; ptr = ptr->pNext) { switch (ptr->sType) { case %[1]s: continue; default: return false; } } return true; } bool AreAllOptionalStructuresCompatible(void* head) { for (auto* ptr = static_cast(head); ptr != nullptr; ptr = ptr->pNext) { switch (ptr->sType) { case %[1]s: continue; default: return false; } } return true; } const void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory) { if (AreAllOptionalStructuresCompatible(head)) { return ToHostAddr(head); } for (auto ptr = StaticCast>(head); ToGuestAddr(ptr) != kNullGuestAddr; ptr = ToHostAddr>(ToGuestAddr(ptr))->pNext) { switch (VkStructureType(ToHostAddr>(ToGuestAddr(ptr))->sType)) { %[2]s default: continue; } } return nullptr; } void* ConvertOptionalStructures(GuestType head, std::unique_ptr& holder, bool& out_of_memory) { if (AreAllOptionalStructuresCompatible(head)) { return ToHostAddr(head); } for (auto ptr = StaticCast>(head); ToGuestAddr(ptr) != kNullGuestAddr; ptr = ToHostAddr>(ToGuestAddr(ptr))->pNext) { switch (VkStructureType(ToHostAddr>(ToGuestAddr(ptr))->sType)) { %[3]s default: continue; } } return nullptr; } GuestType ConvertOptionalStructures(const void* head, std::unique_ptr& holder, bool& out_of_memory) { if (AreAllOptionalStructuresCompatible(head)) { return GuestType(head); } for (auto ptr = static_cast(head); ptr != nullptr; ptr = ptr->pNext) { switch (ptr->sType) { %[4]s default: continue; } } return GuestType(nullptr); } GuestType ConvertOptionalStructures(void* head, std::unique_ptr& holder, bool& out_of_memory) { if (AreAllOptionalStructuresCompatible(head)) { return GuestType(head); } for (auto ptr = static_cast(head); ptr != nullptr; ptr = ptr->pNext) { switch (ptr->sType) { %[5]s default: continue; } } return GuestType(nullptr); } `, 
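		// The joined lists below fill the indexed verbs of the format string above:
		// %[1]s expands to the case labels of layout-compatible sType values (no conversion
		// needed), and %[2]s..%[5]s expand to the guest-in, guest-out, host-in and host-out
		// conversion cases collected in the loop above.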
    strings.Join(compatible_structs, ":\n case "),
    strings.Join(convert_guest_in_structure, ""),
    strings.Join(convert_guest_out_structure, ""),
    strings.Join(convert_host_in_structure, ""),
    strings.Join(convert_host_out_structure, ""))
  if err != nil {
    return err
  }
  return nil
}

func printCustomTrampolies(w io.Writer, sorted_command_names []string, commands map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) (err error) {
  for _, name := range sorted_command_names {
    vfp := ""
    if guest_arch == cpp_types.Arm {
      vfp = ", GuestAbi::kAapcsVfp"
    }
    command := commands[name]
    param_names := []string{}
    variable_declarations := []string{}
    params_are_compatible := true
    for i := uint(0); i < command.NumField(cpp_types.FirstArch); i++ {
      param_name := command.Field(i, cpp_types.FirstArch).Name()
      param_type := command.Field(i, cpp_types.FirstArch).Type()
      param_names = append(param_names, param_name)
      if isInputCompatible(param_type, host_arch, guest_arch) {
        variable_declarations = append(variable_declarations, fmt.Sprintf(
          "%s = %s_guest", param_type.DeclareVar(param_name+"_host", cpp_types.FirstArch), param_name))
      } else {
        params_are_compatible = false
        param_length := command.Field(i, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length()
        if param_length == nil {
          variable_declarations = append(
            variable_declarations,
            fmt.Sprintf("GuestType<%s>::HostHolder %s_holder", param_type.Name(cpp_types.FirstArch), param_name),
            fmt.Sprintf(
              "%[1]s = ToHostType(%[2]s_guest, %[2]s_holder, out_of_memory)",
              param_type.DeclareVar(param_name+"_host", cpp_types.FirstArch), param_name))
        } else {
          variable_declarations = append(
            variable_declarations,
            fmt.Sprintf("GuestType<%s>::HostArrayHolder %s_holder", param_type.Name(cpp_types.FirstArch), param_name),
            fmt.Sprintf(
              "%[1]s = ToHostType(%[2]s_guest, %[2]s_holder, %[3]s_host, out_of_memory)",
              param_type.DeclareVar(param_name+"_host", cpp_types.FirstArch), param_name, param_length.Name()))
        }
      }
    }
    // vkAllocateCommandBuffers and vkBeginCommandBuffer are written manually to handle the awful
    // pInheritanceInfo field. The VkCommandBufferBeginInfo data structure carries no indication of
    // whether that field may be used or not; that information only becomes available at an entirely
    // different time and place, when vkAllocateCommandBuffers is called.
    // To handle VkCommandBufferBeginInfo we need a side channel to pass that boolean from
    // vkAllocateCommandBuffers to vkBeginCommandBuffer, which is why we skip automatic generation
    // of the converter for these two commands.
    if params_are_compatible || name == "vkAllocateCommandBuffers" || name == "vkBeginCommandBuffer" {
      continue
    }
    declare_ret := ""
    if command.Elem(cpp_types.FirstArch).Kind(cpp_types.FirstArch) != cpp_types.Void {
      declare_ret = fmt.Sprintf("auto&& [ret] = GuestReturnReference<PFN_%s%s>(state);\n ret = ", name, vfp)
    }
    _, err = fmt.Fprintf(w, `void DoCustomTrampolineWithThunk_%[1]s(HostCode callee, ProcessState* state) {
  PFN_%[1]s callee_function = AsFuncPtr(callee);
  auto [%[3]s_guest] = GuestParamsValues<PFN_%[1]s%[2]s>(state);
  [[maybe_unused]] bool out_of_memory;
  %[5]s;
  %[6]scallee_function(%[4]s_host);
}

`, name, vfp, strings.Join(param_names, "_guest, "), strings.Join(param_names, "_host, "),
      strings.Join(variable_declarations, ";\n "), declare_ret)
    if err != nil {
      return err
    }
  }
  return nil
}

func printCustomGuestRunners(w io.Writer, sorted_command_names []string, commands map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) (err error) {
  for _, name := range sorted_command_names {
    command := commands[name]
    param_names := []string{}
    variable_declarations := []string{}
    params_are_compatible := true
    for i := uint(0); i < command.NumField(cpp_types.FirstArch); i++ {
      param_name := command.Field(i, cpp_types.FirstArch).Name()
      param_type := command.Field(i, cpp_types.FirstArch).Type()
      param_names = append(param_names, param_name)
      if isInputCompatible(param_type, host_arch, guest_arch) {
        variable_declarations = append(variable_declarations, fmt.Sprintf(
          "%[1]s_guest = %[1]s_host", param_name))
      } else {
        params_are_compatible = false
        param_length := command.Field(i, cpp_types.FirstArch).BaseFieldInfo().(vulkan_xml.ExtendedFieldInfo).Length()
        if param_length == nil {
          variable_declarations = append(
            variable_declarations,
            fmt.Sprintf("GuestType<%s>::GuestHolder %s_holder", param_type.Name(cpp_types.FirstArch), param_name),
            fmt.Sprintf(
              "%[2]s_guest = GuestType<%[1]s>(%[2]s_host, %[2]s_holder, out_of_memory)",
              param_type.Name(cpp_types.FirstArch), param_name))
        } else {
          variable_declarations = append(
            variable_declarations,
            fmt.Sprintf("GuestType<%s>::GuestArrayHolder %s_holder", param_type.Name(cpp_types.FirstArch), param_name),
            fmt.Sprintf(
              "%[2]s_guest = GuestType<%[1]s>(%[2]s_host, %[2]s_holder, %[3]s_host, out_of_memory)",
              param_type.Name(cpp_types.FirstArch), param_name, param_length.Name()))
        }
      }
    }
    // vkCreateInstance needs additional non-trivial processing.
    if params_are_compatible || name == "vkCreateInstance" {
      continue
    }
    _, err = fmt.Fprintf(w, `void RunGuest_%[1]s(GuestAddr pc, GuestArgumentBuffer* buf) {
  auto [%[3]s_host] = HostArgumentsValues<PFN_%[1]s>(buf);
  {
    [[maybe_unused]] bool out_of_memory;
    auto [%[2]s_guest] = GuestArgumentsReferences<PFN_%[1]s>(buf);
    %[4]s;
    RunGuestCall(pc, buf);
  }
}

`, name, strings.Join(param_names, "_guest, "), strings.Join(param_names, "_host, "),
      strings.Join(variable_declarations, ";\n "))
    if err != nil {
      return err
    }
  }
  return nil
}

func printExtensionsMap(w io.Writer, extensions map[string]int64) (err error) {
  names := make([]string, 0, len(extensions))
  for extension := range extensions {
    names = append(names, fmt.Sprintf("{\"%s\", %d}", extension, extensions[extension]))
  }
  sort.Strings(names)
  _, err = fmt.Fprintf(w, `
struct ExtensionInfo {
  const char* name;
  uint32_t maxsupported_spec;
};

[[maybe_unused]] auto& GetExtensionsMap() {
#if defined(__i386__)
  static constexpr std::array<ExtensionInfo, %[1]d> map{
#else
  static constexpr std::array<ExtensionInfo, %[2]d> map{
#endif
      {%[3]s}};
  static_assert(IsSorted(std::begin(map), std::end(map), StrCmpLessName));
  return map;
}
`,
    len(extensions)-1,
    len(extensions),
    // Disable VK_EXT_device_memory_report extension only for arm32.
    strings.Replace(
      strings.Join(names, ",\n "),
      " {\"VK_EXT_device_memory_report\", 2},\n",
      `#if !defined(__i386__)
 {"VK_EXT_device_memory_report", 2},
#endif
`,
      1))
  return err
}

func printMaps(w io.Writer, sorted_command_names []string, commands map[string]cpp_types.Type, host_arch, guest_arch cpp_types.Arch) (err error) {
  command_trampolines := []string{}
  command_wrappers := []string{}
  for _, name := range sorted_command_names {
    command := commands[name]
    params_are_compatible := true
    switch name {
    // These functions are compatible based on signatures, but actually need special processing.
    case "vkGetDeviceProcAddr", "vkGetInstanceProcAddr":
      params_are_compatible = false
    }
    for i := uint(0); i < command.NumField(guest_arch); i++ {
      param_type := command.Field(i, guest_arch).Type()
      if !isInputCompatible(param_type, host_arch, guest_arch) {
        params_are_compatible = false
        break
      }
    }
    // Data structures in vkEnumerate{Device,Instance}ExtensionProperties are compatible,
    // but we need to filter out unsupported extensions.
    // Data structures in vkFreeCommandBuffers are compatible, but we need to free the memory
    // allocated for meta-information in vkAllocateCommandBuffers.
    if params_are_compatible && name != "vkEnumerateDeviceExtensionProperties" &&
      name != "vkEnumerateInstanceExtensionProperties" && name != "vkFreeCommandBuffers" {
      if guest_arch == cpp_types.Arm {
        command_trampolines = append(command_trampolines, fmt.Sprintf(
          "{\"%s\", GetTrampolineFunc<%s, GuestAbi::kAapcsVfp>()}", name, command.Name(guest_arch)))
      } else {
        command_trampolines = append(command_trampolines, fmt.Sprintf(
          "{\"%s\", GetTrampolineFunc<%s>()}", name, command.Name(guest_arch)))
      }
    } else {
      command_trampolines = append(command_trampolines, fmt.Sprintf(
        "{\"%[1]s\", DoCustomTrampolineWithThunk_%[1]s}", name))
    }
    // Data structures in vkEnumerate{Device,Instance}ExtensionProperties are compatible,
    // but we need to filter out unsupported extensions.
    if params_are_compatible && name != "vkEnumerateDeviceExtensionProperties" &&
      name != "vkEnumerateInstanceExtensionProperties" {
      command_wrappers = append(command_wrappers, fmt.Sprintf(
        "{\"%[1]s\", [](GuestAddr pc) { return WrapGuestFunctionImpl(pc, kGuestFunctionWrapperSignature<%[2]s>, RunGuestCall, \"%[1]s\"); }}",
        name, command.Name(guest_arch)))
    } else {
      command_wrappers = append(command_wrappers, fmt.Sprintf(
        "{\"%[1]s\", [](GuestAddr pc) { return WrapGuestFunctionImpl(pc, kGuestFunctionWrapperSignature<%[2]s>, RunGuest_%[1]s, \"%[1]s\"); }}",
        name, command.Name(guest_arch)))
    }
  }
  _, err = fmt.Fprintf(w, `auto& GetMapForvkGetProcAddr() {
  static constexpr std::array map{
      {%[2]s}};
  static_assert(IsSorted(std::begin(map), std::end(map), StrCmpLessName));
  return map;
}

auto& GetMapForRunGuestvkGetInstanceProcAddr() {
  static constexpr std::array map{
      {%[3]s}};
  static_assert(IsSorted(std::begin(map), std::end(map), StrCmpLessName));
  return map;
}
`, len(sorted_command_names), strings.Join(command_trampolines, ",\n "), strings.Join(command_wrappers, ",\n "))
  if err != nil {
    return err
  }
  return nil
}

// The same name as in generator.py in the vulkan_headers package.
func toEnumNameWithSuffix(name, mark string) string {
  split_point := len(name)
  for isAsciiUpperCase(rune(name[split_point-1])) {
    split_point--
    if split_point == 0 {
      split_point = len(name)
      break
    }
  }
  var enum_name_with_suffix string
  for _, runе := range name[0:split_point] {
    if isAsciiUpperCase(runе) && len(enum_name_with_suffix) > 0 &&
      isAsciiLowerCase(rune(enum_name_with_suffix[len(enum_name_with_suffix)-1])) {
      enum_name_with_suffix += string('_')
    }
    enum_name_with_suffix += string(runе)
  }
  enum_name_with_suffix += "_" + mark
  if split_point != len(name) {
    enum_name_with_suffix += "_" + name[split_point:]
  }
  return strings.ToUpper(enum_name_with_suffix)
}

func isAsciiUpperCase(runе rune) bool { return 'A' <= runе && runе <= 'Z' }

func isAsciiLowerCase(runе rune) bool { return '0' <= runе && runе <= '9' || 'a' <= runе && runе <= 'z' || runе == '_' }

func isInputCompatible(typе cpp_types.Type, host_arch, guest_arch cpp_types.Arch) bool {
  if isPtrToAlias(typе) {
    return isInputCompatible(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch), host_arch, guest_arch)
  }
  if isPtrToConstAlias(typе) {
    return isInputCompatible(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch), host_arch, guest_arch)
  }
  if isPtrToStruct(typе) {
    return isInputCompatible(typе.Elem(cpp_types.FirstArch), host_arch, guest_arch)
  }
  if isPtrToConstStruct(typе) {
    return isInputCompatible(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch), host_arch, guest_arch)
  }
  // TODO(b/171255170): remove once conversion of arrays in optional structures is supported.
  if typе.Name(cpp_types.FirstArch) == "struct VkDrmFormatModifierProperties2EXT" {
    return true
  }
  // TODO(b/171255170): remove once conversion of optional structures in callbacks is supported.
  if typе.Name(cpp_types.FirstArch) == "struct VkDebugUtilsMessengerCallbackDataEXT" ||
    typе.Name(cpp_types.FirstArch) == "struct VkDeviceMemoryReportCallbackDataEXT" {
    return true
  }
  // TODO(b/322902403): Make VkGetLatencyMarkerInfoNV work with berberis.
  if typе.Name(cpp_types.FirstArch) == "struct VkGetLatencyMarkerInfoNV" {
    return true
  }
  if typе.Name(cpp_types.FirstArch) == "struct VkLatencyTimingsFrameReportNV" {
    return true
  }
  // TODO(b/322902053): Make VkFaultCallbackInfo work with berberis.
  if typе.Name(cpp_types.FirstArch) == "struct VkFaultData" {
    return true
  }
  // TODO(b/322902400): Make VkDescriptorBufferBindingInfoEXT work with berberis.
  if typе.Name(cpp_types.FirstArch) == "struct VkDescriptorBufferBindingInfoEXT" {
    return true
  }
  // If a structure is extensible then it can always be extended with an incompatible extension via its "pNext" field.
  if isExtensibleType(typе) {
    return false
  }
  return cpp_types.IsInputCompatible(typе, host_arch, guest_arch)
}

func isExtensibleType(typе cpp_types.Type) bool {
  if isPtrToAlias(typе) {
    return isExtensibleType(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch))
  }
  if isPtrToConstAlias(typе) {
    return isExtensibleType(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch))
  }
  if isPtrToStruct(typе) {
    return isExtensibleType(typе.Elem(cpp_types.FirstArch))
  }
  if isPtrToConstStruct(typе) {
    return isExtensibleType(typе.Elem(cpp_types.FirstArch).Elem(cpp_types.FirstArch))
  }
  if isStruct(typе) &&
    (typе.(vulkan_xml.ExtendedStructInfo).OptionalStruct() ||
      (typе.Field(0, cpp_types.FirstArch).Name() == "sType" &&
        typе.Field(1, cpp_types.FirstArch).Name() == "pNext")) {
    return true
  }
  // A union or struct may not be extensible by itself, but it may include pointers to structs
  // that are extensible, so we have to check for that case, too.
  if isUnion(typе) || isStruct(typе) {
    ids := typе.NumField(cpp_types.FirstArch)
    for id := uint(0); id < ids; id++ {
      if isExtensibleType(typе.Field(id, cpp_types.FirstArch).Type()) {
        return true
      }
    }
  }
  return false
}

func isAlias(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Alias}) }

func isAliasOfEnum(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Alias, cpp_types.Enum}) }

func isAliasOfOpaque(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Alias, cpp_types.Opaque}) }

func isArray(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Array}) }

func isConst(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Const}) }

func isConstPtr(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Const, cpp_types.Ptr}) }

func isConstPtrToFunc(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Const, cpp_types.Ptr, cpp_types.Func}) }

func isEnum(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Enum}) }

func isFunc(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Func}) }

func isInt8T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Int8T}) }

func isInt16T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Int16T}) }

func isInt32T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Int32T}) }

func isInt64T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Int64T}) }

func isPtr(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr}) }

func isPtrToAlias(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Alias}) }

func isPtrToConstAlias(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Const, cpp_types.Alias}) }

func isPtrToConst(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Const})
} func isPtrToConstOpaque(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Const, cpp_types.Opaque}) } func isPtrToConstStruct(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Const, cpp_types.Struct}) } func isPtrToFunc(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Func}) } func isPtrToOpaque(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Opaque}) } func isPtrToStruct(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Ptr, cpp_types.Struct}) } func isStruct(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Struct}) } func isUInt8T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.UInt8T}) } func isUInt16T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.UInt16T}) } func isUInt32T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.UInt32T}) } func isUInt64T(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.UInt64T}) } func isUnion(typе cpp_types.Type) bool { return cpp_types.IsKind(typе, []cpp_types.Kind{cpp_types.Union}) } func toIntVal(b bool) int { if b { return 1 } else { return 0 } }
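
// Informational note for isInputCompatible and isExtensibleType above (comment only; the exact
// set of structures comes from the vk.xml being processed, so a particular build may differ).
// VkMemoryAllocateInfo starts with the "sType"/"pNext" pair, so isExtensibleType reports it as
// extensible and isInputCompatible returns false even when the host and guest layouts match:
// the chain hanging off "pNext" may still carry an incompatible extension structure at runtime.
// A parameter declared as "const VkMemoryAllocateInfo*" is first unwrapped through its pointer
// and const layers (the isPtrTo* helpers) before that check is applied. By contrast, a plain
// struct such as VkExtent2D has neither "sType" nor "pNext" and no pointer members, so its
// compatibility is decided purely by cpp_types.IsInputCompatible, i.e. by comparing layouts.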
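
// For reference, printGuestStructVerification above expands its format strings into checks of
// roughly the following shape for every struct or union that is not guest-compatible (the type
// name, sizes, alignments and offsets here are made-up placeholders, not values produced from a
// real vk.xml):
//
//   CHECK_STRUCT_LAYOUT(berberis::GuestType<const berberis::VkSomethingCreateInfo>, 512, 64);
//   CHECK_FIELD_LAYOUT(berberis::GuestType<const berberis::VkSomethingCreateInfo>, sType, 0, 32);
//   CHECK_FIELD_LAYOUT(berberis::GuestType<const berberis::VkSomethingCreateInfo>, pNext, 64, 64);
//
// The same checks are emitted both with and without the "const" qualifier, and for unions every
// field is checked at offset 0 with the literal "uniоn." prefix used above on the field name.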
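
// Illustrative sketch only (not used by the generator itself): shows the names produced by
// toEnumNameWithSuffix above for a few hypothetical inputs. The expected results in the
// comments follow from tracing the function: an underscore is inserted before an upper-case
// letter that follows a lower-case letter, a digit or '_' (which is presumably why
// isAsciiLowerCase accepts those characters), the mark is appended, and a trailing
// all-upper-case tag such as "KHR" is kept after the mark.
func exampleEnumNamesWithSuffix() []string {
  return []string{
    // "VkImageCreateInfo" + "NOVERIFY" -> VK_IMAGE_CREATE_INFO_NOVERIFY
    toEnumNameWithSuffix("VkImageCreateInfo", "NOVERIFY"),
    // "VkPhysicalDeviceVulkan12Features" + "NOVERIFY" -> VK_PHYSICAL_DEVICE_VULKAN12_FEATURES_NOVERIFY
    toEnumNameWithSuffix("VkPhysicalDeviceVulkan12Features", "NOVERIFY"),
    // "VkSurfaceCapabilitiesKHR" + "NOVERIFY" -> VK_SURFACE_CAPABILITIES_NOVERIFY_KHR
    toEnumNameWithSuffix("VkSurfaceCapabilitiesKHR", "NOVERIFY"),
  }
}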