OpenCL C++ Bindings
cl2.hpp
1 //
2 // Copyright (c) 2008-2020 The Khronos Group Inc.
3 //
4 // Licensed under the Apache License, Version 2.0 (the "License");
5 // you may not use this file except in compliance with the License.
6 // You may obtain a copy of the License at
7 //
8 // http://www.apache.org/licenses/LICENSE-2.0
9 //
10 // Unless required by applicable law or agreed to in writing, software
11 // distributed under the License is distributed on an "AS IS" BASIS,
12 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 // See the License for the specific language governing permissions and
14 // limitations under the License.
15 //
16 
389 #ifndef CL_HPP_
390 #define CL_HPP_
391 
392 /* Handle deprecated preprocessor definitions. In each case, we only check for
393  * the old name if the new name is not defined, so that user code can define
394  * both and hence work with either version of the bindings.
395  */
396 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
397 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
398 # define CL_HPP_USE_DX_INTEROP
399 #endif
400 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
401 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
402 # define CL_HPP_USE_CL_DEVICE_FISSION
403 #endif
404 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
405 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
406 # define CL_HPP_ENABLE_EXCEPTIONS
407 #endif
408 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
409 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
410 # define CL_HPP_NO_STD_VECTOR
411 #endif
412 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
413 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
414 # define CL_HPP_NO_STD_STRING
415 #endif
416 #if defined(VECTOR_CLASS)
417 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
418 #endif
419 #if defined(STRING_CLASS)
420 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
421 #endif
422 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
423 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
424 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
425 #endif
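// Illustrative note (not part of the original header): because each legacy macro is
// mapped onto its CL_HPP_-prefixed replacement above, old and new spellings behave
// identically, e.g.
//
//     #define __CL_ENABLE_EXCEPTIONS     // legacy name: works, but emits the pragma above
//     #include <CL/cl2.hpp>
//
// is equivalent to defining CL_HPP_ENABLE_EXCEPTIONS before the include.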
426 
427 /* Warn about features that are no longer supported
428  */
429 #if defined(__USE_DEV_VECTOR)
430 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
431 #endif
432 #if defined(__USE_DEV_STRING)
433 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
434 #endif
435 
436 /* Detect which version to target */
437 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
438 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 220 (OpenCL 2.2)")
439 # define CL_HPP_TARGET_OPENCL_VERSION 220
440 #endif
441 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && \
442  CL_HPP_TARGET_OPENCL_VERSION != 110 && \
443  CL_HPP_TARGET_OPENCL_VERSION != 120 && \
444  CL_HPP_TARGET_OPENCL_VERSION != 200 && \
445  CL_HPP_TARGET_OPENCL_VERSION != 210 && \
446  CL_HPP_TARGET_OPENCL_VERSION != 220 && \
447  CL_HPP_TARGET_OPENCL_VERSION != 300
448 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210, 220 or 300). It will be set to 220")
449 # undef CL_HPP_TARGET_OPENCL_VERSION
450 # define CL_HPP_TARGET_OPENCL_VERSION 220
451 #endif
452 
453 /* Forward target OpenCL version to C headers if necessary */
454 #if defined(CL_TARGET_OPENCL_VERSION)
455 /* Warn if prior definition of CL_TARGET_OPENCL_VERSION is lower than
456  * requested C++ bindings version */
457 #if CL_TARGET_OPENCL_VERSION < CL_HPP_TARGET_OPENCL_VERSION
458 # pragma message("CL_TARGET_OPENCL_VERSION is already defined and is lower than CL_HPP_TARGET_OPENCL_VERSION")
459 #endif
460 #else
461 # define CL_TARGET_OPENCL_VERSION CL_HPP_TARGET_OPENCL_VERSION
462 #endif
463 
464 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
465 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
466 #endif
467 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && \
468  CL_HPP_MINIMUM_OPENCL_VERSION != 110 && \
469  CL_HPP_MINIMUM_OPENCL_VERSION != 120 && \
470  CL_HPP_MINIMUM_OPENCL_VERSION != 200 && \
471  CL_HPP_MINIMUM_OPENCL_VERSION != 210 && \
472  CL_HPP_MINIMUM_OPENCL_VERSION != 220 && \
473  CL_HPP_MINIMUM_OPENCL_VERSION != 300
474 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210, 220 or 300). It will be set to 100")
475 # undef CL_HPP_MINIMUM_OPENCL_VERSION
476 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
477 #endif
478 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
479 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
480 #endif
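// Illustrative usage sketch (not part of the original header): a translation unit
// normally pins both the target and minimum OpenCL versions before the include, for
// example to use OpenCL 2.0 features while still running on OpenCL 1.2 platforms:
//
//     #define CL_HPP_MINIMUM_OPENCL_VERSION 120
//     #define CL_HPP_TARGET_OPENCL_VERSION  200
//     #include <CL/cl2.hpp>
//
// The CL_USE_DEPRECATED_OPENCL_*_APIS definitions that follow are then derived from
// the chosen minimum version.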
481 
482 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
483 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
484 #endif
485 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
486 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
487 #endif
488 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
489 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
490 #endif
491 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
492 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
493 #endif
494 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 210 && !defined(CL_USE_DEPRECATED_OPENCL_2_1_APIS)
495 # define CL_USE_DEPRECATED_OPENCL_2_1_APIS
496 #endif
497 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 220 && !defined(CL_USE_DEPRECATED_OPENCL_2_2_APIS)
498 # define CL_USE_DEPRECATED_OPENCL_2_2_APIS
499 #endif
500 
501 #ifdef _WIN32
502 
503 #include <malloc.h>
504 
505 #if defined(CL_HPP_USE_DX_INTEROP)
506 #include <CL/cl_d3d10.h>
507 #include <CL/cl_dx9_media_sharing.h>
508 #endif
509 #endif // _WIN32
510 
511 #if defined(_MSC_VER)
512 #include <intrin.h>
513 #endif // _MSC_VER
514 
515  // Check for a valid C++ version
516 
517 // Both tests are needed here because Visual Studio does not update
518 // __cplusplus to reflect the supported language version
519 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
520 #error Visual Studio 2013 or another C++11-supporting compiler required
521 #endif
522 
523 //
524 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
525 #include <CL/cl_ext.h>
526 #endif
527 
528 #if defined(__APPLE__) || defined(__MACOSX)
529 #include <OpenCL/opencl.h>
530 #else
531 #include <CL/opencl.h>
532 #endif // !__APPLE__
533 
534 #if (__cplusplus >= 201103L || _MSVC_LANG >= 201103L )
535 #define CL_HPP_NOEXCEPT_ noexcept
536 #else
537 #define CL_HPP_NOEXCEPT_
538 #endif
539 
540 #if __cplusplus >= 201703L
541 # define CL_HPP_DEFINE_STATIC_MEMBER_ inline
542 #elif defined(_MSC_VER)
543 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
544 #elif defined(__MINGW32__)
545 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((selectany))
546 #else
547 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
548 #endif // !_MSC_VER
549 
550 // Define deprecated prefixes and suffixes to ensure compilation
551 // in case they are not pre-defined
552 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
553 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
554 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
555 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
556 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
557 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
558 
559 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
560 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
561 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
562 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
563 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
564 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
565 
566 #if !defined(CL_CALLBACK)
567 #define CL_CALLBACK
568 #endif //CL_CALLBACK
569 
570 #include <utility>
571 #include <limits>
572 #include <iterator>
573 #include <mutex>
574 #include <cstring>
575 #include <functional>
576 
577 
578 // Define a size_type to represent a correctly resolved size_t
579 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
580 namespace cl {
581  using size_type = ::size_t;
582 } // namespace cl
583 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
584 namespace cl {
585  using size_type = size_t;
586 } // namespace cl
587 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
588 
589 
590 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
591 #include <exception>
592 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
593 
594 #if !defined(CL_HPP_NO_STD_VECTOR)
595 #include <vector>
596 namespace cl {
597  template < class T, class Alloc = std::allocator<T> >
598  using vector = std::vector<T, Alloc>;
599 } // namespace cl
600 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
601 
602 #if !defined(CL_HPP_NO_STD_STRING)
603 #include <string>
604 namespace cl {
605  using string = std::string;
606 } // namespace cl
607 #endif // #if !defined(CL_HPP_NO_STD_STRING)
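// Illustrative sketch (not part of the original header): defining CL_HPP_NO_STD_VECTOR
// or CL_HPP_NO_STD_STRING suppresses the aliases above, in which case user code must
// provide its own cl::vector / cl::string before including this header, e.g.
//
//     #define CL_HPP_NO_STD_STRING
//     namespace cl { using string = my::tiny_string; }   // hypothetical user type
//     #include <CL/cl2.hpp>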
608 
609 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
610 
611 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
612 #include <memory>
613 namespace cl {
614  // Alias unique_ptr (and the related allocate_pointer) for internal use
615  // so that users can substitute their own implementations
616  template<class T, class D>
617  using pointer = std::unique_ptr<T, D>;
618 } // namespace cl
619 #endif
620 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
621 #if !defined(CL_HPP_NO_STD_ARRAY)
622 #include <array>
623 namespace cl {
624  template < class T, size_type N >
625  using array = std::array<T, N>;
626 } // namespace cl
627 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
628 
629 // Define the old size_t interface class, built on top of size_type,
630 // to allow backward-compatible use of the earlier bindings
631 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
632 namespace cl {
633  namespace compatibility {
638  template <int N>
639  class size_t
640  {
641  private:
642  size_type data_[N];
643 
644  public:
646  size_t()
647  {
648  for (int i = 0; i < N; ++i) {
649  data_[i] = 0;
650  }
651  }
652 
653  size_t(const array<size_type, N> &rhs)
654  {
655  for (int i = 0; i < N; ++i) {
656  data_[i] = rhs[i];
657  }
658  }
659 
660  size_type& operator[](int index)
661  {
662  return data_[index];
663  }
664 
665  const size_type& operator[](int index) const
666  {
667  return data_[index];
668  }
669 
671  operator size_type* () { return data_; }
672 
674  operator const size_type* () const { return data_; }
675 
676  operator array<size_type, N>() const
677  {
678  array<size_type, N> ret;
679 
680  for (int i = 0; i < N; ++i) {
681  ret[i] = data_[i];
682  }
683  return ret;
684  }
685  };
686  } // namespace compatibility
687 
688  template<int N>
689  using size_t = compatibility::size_t<N>;
690 } // namespace cl
691 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
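// Illustrative example (not part of the original header): with
// CL_HPP_ENABLE_SIZE_T_COMPATIBILITY defined, code written against the older bindings
// keeps compiling because cl::size_t<N> converts to and from cl::array<size_type, N>
// and decays to a raw size_type pointer:
//
//     cl::size_t<3> region;                            // zero-initialized
//     region[0] = 512; region[1] = 512; region[2] = 1;
//     cl::array<cl::size_type, 3> as_array = region;   // implicit conversion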
692 
693 // Helper alias: the embedded comma in array<size_type, 3> would otherwise confuse the parameter-name macros below
694 namespace cl {
695  namespace detail {
696  using size_t_array = array<size_type, 3>;
697  } // namespace detail
698 } // namespace cl
699 
700 
706 namespace cl {
707  class Memory;
708 
709 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
710  if (!pfn_##name) { \
711  pfn_##name = (PFN_##name) \
712  clGetExtensionFunctionAddress(#name); \
713  if (!pfn_##name) { \
714  } \
715  }
716 
717 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
718  if (!pfn_##name) { \
719  pfn_##name = (PFN_##name) \
720  clGetExtensionFunctionAddressForPlatform(platform, #name); \
721  if (!pfn_##name) { \
722  } \
723  }
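// Illustrative sketch (not part of the original header): these macros assume the
// enclosing code has declared a PFN_<name> function-pointer typedef together with a
// matching pfn_<name> variable; the macro then lazily resolves the extension entry
// point, e.g.
//
//     typedef cl_int (CL_API_CALL *PFN_clExampleExtKHR)(cl_platform_id);  // hypothetical extension
//     static PFN_clExampleExtKHR pfn_clExampleExtKHR = NULL;
//     CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clExampleExtKHR);
//     // pfn_clExampleExtKHR is now usable if the platform exposes the extension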
724 
725  class Program;
726  class Device;
727  class Context;
728  class CommandQueue;
729  class DeviceCommandQueue;
730  class Memory;
731  class Buffer;
732  class Pipe;
733 
734 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
735 
739  class Error : public std::exception
740  {
741  private:
742  cl_int err_;
743  const char * errStr_;
744  public:
754  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
755  {}
756 
757  ~Error() throw() {}
758 
763  virtual const char * what() const throw ()
764  {
765  if (errStr_ == NULL) {
766  return "empty";
767  }
768  else {
769  return errStr_;
770  }
771  }
772 
777  cl_int err(void) const { return err_; }
778  };
779 #define CL_HPP_ERR_STR_(x) #x
780 #else
781 #define CL_HPP_ERR_STR_(x) NULL
782 #endif // CL_HPP_ENABLE_EXCEPTIONS
783 
784 
785 namespace detail
786 {
787 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
788 static inline cl_int errHandler (
789  cl_int err,
790  const char * errStr = NULL)
791 {
792  if (err != CL_SUCCESS) {
793  throw Error(err, errStr);
794  }
795  return err;
796 }
797 #else
798 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
799 {
800  (void) errStr; // suppress unused variable warning
801  return err;
802 }
803 #endif // CL_HPP_ENABLE_EXCEPTIONS
804 }
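// Illustrative note (not part of the original header): every wrapper call below routes
// its return code through detail::errHandler, so the error-reporting style is selected
// by a single macro:
//
//     cl_int err = detail::errHandler(CL_INVALID_VALUE, "clSomething");
//     // with CL_HPP_ENABLE_EXCEPTIONS : throws cl::Error(CL_INVALID_VALUE, "clSomething")
//     // without                       : simply returns CL_INVALID_VALUE to the caller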
805 
806 
807 
809 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
810 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
811 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
812 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
813 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
814 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
815 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
816 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfileInfo)
817 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
818 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
819 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
820 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
821 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
822 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
823 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
824 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
825 #define __GET_KERNEL_SUB_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelSubGroupInfo)
826 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
827 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
828 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
829 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
830 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
831 
832 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
833 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
834 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
835 
836 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
837 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
838 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
839 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
840 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
841 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
842 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
843 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
844 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
845 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
846 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
847 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
848 
849 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
850 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
851 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
852 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
853 
854 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
855 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
856 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
857 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
858 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
859 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
860 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
861 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
862 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
863 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
864 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
865 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
866 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
867 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
868 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
869 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
870 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
871 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
872 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
873 
874 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
875 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
876 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
877 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
878 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
879 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
880 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
881 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
882 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
883 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
884 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
885 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
886 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
887 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
888 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
889 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
890 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
891 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
892 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
893 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
894 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnMapMemObject)
895 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
896 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
897 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
898 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
899 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
900 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
901 #define __ENQUEUE_MIGRATE_SVM_ERR CL_HPP_ERR_STR_(clEnqueueSVMMigrateMem)
902 #define __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clSetDefaultDeviceCommandQueue)
903 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
904 
905 
906 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
907 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
908 
909 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
910 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
911 
912 
913 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
914 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
915 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
916 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
917 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
918 
919 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
920 #define __GET_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetHostTimer)
921 #define __GET_DEVICE_AND_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetDeviceAndHostTimer)
922 #endif
923 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
924 #define __SET_PROGRAM_RELEASE_CALLBACK_ERR CL_HPP_ERR_STR_(clSetProgramReleaseCallback)
925 #define __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR CL_HPP_ERR_STR_(clSetProgramSpecializationConstant)
926 #endif
927 
928 
932 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
933 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
934 #else
935 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
936 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
937 
941 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
942 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
943 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
944 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
945 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
946 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
947 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
948 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
949 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
950 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
951 
955 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
956 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
957 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
958 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
959 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
960 
964 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
965 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
966 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
967 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
968 
969 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
970 #define __CLONE_KERNEL_ERR CL_HPP_ERR_STR_(clCloneKernel)
971 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
972 
973 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
974 
976 
977 namespace detail {
978 
979 // Generic getInfoHelper. The final parameter (declared as long here) is used to
980 // guide overload resolution: the actual argument passed is an int, so this overload
981 // is reached only via an int-to-long conversion and therefore loses to any
982 // specialization that declares the parameter as an int.
983 template<typename Functor, typename T>
984 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
985 {
986  return f(name, sizeof(T), param, NULL);
987 }
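// Illustrative sketch (not part of the original header) of the dispatch technique used
// throughout this namespace: callers pass the literal 0 (an int), so an overload whose
// final parameter is int is an exact match, while a generic overload taking long is
// only reachable through an int-to-long conversion:
//
//     template<typename T> int pick(T*, long) { return 0; }                         // fallback
//     template<typename T> int pick(T*, int, typename T::cl_type = 0) { return 1; } // preferred
//     // pick(p, 0) yields 1 only when T::cl_type exists; otherwise SFINAE removes
//     // the second overload and the long fallback is chosen.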
988 
989 // Specialized for getInfo<CL_PROGRAM_BINARIES>
990 // Assumes that the output vector was correctly resized on the way in
991 template <typename Func>
992 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
993 {
994  if (name != CL_PROGRAM_BINARIES) {
995  return CL_INVALID_VALUE;
996  }
997  if (param) {
998  // Create array of pointers, calculate total size and pass pointer array in
999  size_type numBinaries = param->size();
1000  vector<unsigned char*> binariesPointers(numBinaries);
1001 
1002  for (size_type i = 0; i < numBinaries; ++i)
1003  {
1004  binariesPointers[i] = (*param)[i].data();
1005  }
1006 
1007  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
1008 
1009  if (err != CL_SUCCESS) {
1010  return err;
1011  }
1012  }
1013 
1014 
1015  return CL_SUCCESS;
1016 }
1017 
1018 // Specialized getInfoHelper for vector params
1019 template <typename Func, typename T>
1020 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
1021 {
1022  size_type required;
1023  cl_int err = f(name, 0, NULL, &required);
1024  if (err != CL_SUCCESS) {
1025  return err;
1026  }
1027  const size_type elements = required / sizeof(T);
1028 
1029  // Temporary to avoid changing param on an error
1030  vector<T> localData(elements);
1031  err = f(name, required, localData.data(), NULL);
1032  if (err != CL_SUCCESS) {
1033  return err;
1034  }
1035  if (param) {
1036  *param = std::move(localData);
1037  }
1038 
1039  return CL_SUCCESS;
1040 }
1041 
1042 /* Specialization for reference-counted types. This depends on the
1043  * existence of Wrapper<T>::cl_type, and none of the other types having the
1044  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1045  * does not work, because when using a derived type (e.g. Context) the generic
1046  * template will provide a better match.
1047  */
1048 template <typename Func, typename T>
1049 inline cl_int getInfoHelper(
1050  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
1051 {
1052  size_type required;
1053  cl_int err = f(name, 0, NULL, &required);
1054  if (err != CL_SUCCESS) {
1055  return err;
1056  }
1057 
1058  const size_type elements = required / sizeof(typename T::cl_type);
1059 
1060  vector<typename T::cl_type> value(elements);
1061  err = f(name, required, value.data(), NULL);
1062  if (err != CL_SUCCESS) {
1063  return err;
1064  }
1065 
1066  if (param) {
1067  // Assign to convert CL type to T for each element
1068  param->resize(elements);
1069 
1070  // Assign to param, constructing with retain behaviour
1071  // to correctly capture each underlying CL object
1072  for (size_type i = 0; i < elements; i++) {
1073  (*param)[i] = T(value[i], true);
1074  }
1075  }
1076  return CL_SUCCESS;
1077 }
1078 
1079 // Specialized getInfoHelper for string params
1080 template <typename Func>
1081 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1082 {
1083  size_type required;
1084  cl_int err = f(name, 0, NULL, &required);
1085  if (err != CL_SUCCESS) {
1086  return err;
1087  }
1088 
1089  // Read into a char vector rather than directly into the string:
1090  // std::string only exposes const data() (before C++17), a char vector does not
1091  if (required > 0) {
1092  vector<char> value(required);
1093  err = f(name, required, value.data(), NULL);
1094  if (err != CL_SUCCESS) {
1095  return err;
1096  }
1097  if (param) {
1098  param->assign(begin(value), prev(end(value)));
1099  }
1100  }
1101  else if (param) {
1102  param->assign("");
1103  }
1104  return CL_SUCCESS;
1105 }
1106 
1107 // Specialized getInfoHelper for array<size_type, N> params
1108 template <typename Func, size_type N>
1109 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1110 {
1111  size_type required;
1112  cl_int err = f(name, 0, NULL, &required);
1113  if (err != CL_SUCCESS) {
1114  return err;
1115  }
1116 
1117  size_type elements = required / sizeof(size_type);
1118  vector<size_type> value(elements, 0);
1119 
1120  err = f(name, required, value.data(), NULL);
1121  if (err != CL_SUCCESS) {
1122  return err;
1123  }
1124 
1125  // Bound the copy with N to prevent overruns
1126  // if passed N > than the amount copied
1127  if (elements > N) {
1128  elements = N;
1129  }
1130  for (size_type i = 0; i < elements; ++i) {
1131  (*param)[i] = value[i];
1132  }
1133 
1134  return CL_SUCCESS;
1135 }
1136 
1137 template<typename T> struct ReferenceHandler;
1138 
1139 /* Specialization for reference-counted types. This depends on the
1140  * existence of Wrapper<T>::cl_type, and none of the other types having the
1141  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1142  * does not work, because when using a derived type (e.g. Context) the generic
1143  * template will provide a better match.
1144  */
1145 template<typename Func, typename T>
1146 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1147 {
1148  typename T::cl_type value;
1149  cl_int err = f(name, sizeof(value), &value, NULL);
1150  if (err != CL_SUCCESS) {
1151  return err;
1152  }
1153  *param = value;
1154  if (value != NULL)
1155  {
1156  err = param->retain();
1157  if (err != CL_SUCCESS) {
1158  return err;
1159  }
1160  }
1161  return CL_SUCCESS;
1162 }
1163 
1164 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1165  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1166  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1167  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1168  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1169  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1170  \
1171  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1172  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1173  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1174  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1175  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1176  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1177  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1178  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1179  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1180  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1181  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1182  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1183  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1184  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1185  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1186  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1187  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1188  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1189  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1190  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1191  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1192  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1193  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1194  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1195  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1196  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1197  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1198  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1199  F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
1200  F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
1201  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1202  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1203  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1204  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1205  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1206  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1207  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1208  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1209  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1210  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1211  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1212  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1213  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1214  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1215  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1216  F(cl_device_info, CL_DEVICE_NAME, string) \
1217  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1218  F(cl_device_info, CL_DRIVER_VERSION, string) \
1219  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1220  F(cl_device_info, CL_DEVICE_VERSION, string) \
1221  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1222  \
1223  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1224  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1225  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1226  \
1227  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1228  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1229  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1230  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1231  \
1232  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1233  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1234  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1235  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1236  \
1237  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1238  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1239  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1240  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1241  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1242  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1243  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1244  \
1245  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1246  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1247  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1248  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1249  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1250  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1251  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1252  \
1253  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1254  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1255  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1256  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1257  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1258  \
1259  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1260  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1261  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1262  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1263  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1264  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1265  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1266  \
1267  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1268  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1269  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1270  \
1271  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1272  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1273  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1274  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1275  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1276  \
1277  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1278  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1279  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1280  \
1281  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1282  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1283  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1284  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1285 
1286 
1287 #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
1288  F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
1289  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
1290  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
1291  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
1292  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
1293  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
1294  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
1295  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
1296  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
1297  F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
1298  \
1299  F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
1300  F(cl_mem_info, CL_MEM_OFFSET, size_type) \
1301  \
1302  F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
1303  F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
1304  \
1305  F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1306 
1307 #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
1308  F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
1309  F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
1310  \
1311  F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
1312  \
1313  F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
1314  \
1315  F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
1316  F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
1317  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
1318  F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
1319  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
1320  \
1321  F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
1322  F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
1323  F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
1324  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
1325  F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
1326  F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
1327  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
1328  \
1329  F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
1330  F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
1331  F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1332 
1333 #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
1334  F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
1335  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
1336  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
1337  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
1338  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
1339  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
1340  F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
1341  F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
1342  F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
1343  F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
1344  F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
1345  F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
1346  F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
1347  F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
1348  F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
1349  F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
1350  F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
1351  F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1352 
1353 #define CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(F) \
1354  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE_KHR, size_type) \
1355  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE_KHR, size_type)
1356 
1357 #define CL_HPP_PARAM_NAME_INFO_IL_KHR_(F) \
1358  F(cl_device_info, CL_DEVICE_IL_VERSION_KHR, string) \
1359  F(cl_program_info, CL_PROGRAM_IL_KHR, cl::vector<unsigned char>)
1360 
1361 #define CL_HPP_PARAM_NAME_INFO_2_1_(F) \
1362  F(cl_platform_info, CL_PLATFORM_HOST_TIMER_RESOLUTION, size_type) \
1363  F(cl_program_info, CL_PROGRAM_IL, cl::vector<unsigned char>) \
1364  F(cl_kernel_info, CL_KERNEL_MAX_NUM_SUB_GROUPS, size_type) \
1365  F(cl_kernel_info, CL_KERNEL_COMPILE_NUM_SUB_GROUPS, size_type) \
1366  F(cl_device_info, CL_DEVICE_MAX_NUM_SUB_GROUPS, cl_uint) \
1367  F(cl_device_info, CL_DEVICE_IL_VERSION, string) \
1368  F(cl_device_info, CL_DEVICE_SUB_GROUP_INDEPENDENT_FORWARD_PROGRESS, cl_bool) \
1369  F(cl_command_queue_info, CL_QUEUE_DEVICE_DEFAULT, cl::DeviceCommandQueue) \
1370  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE, size_type) \
1371  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE, size_type) \
1372  F(cl_kernel_sub_group_info, CL_KERNEL_LOCAL_SIZE_FOR_SUB_GROUP_COUNT, cl::detail::size_t_array)
1373 
1374 #define CL_HPP_PARAM_NAME_INFO_2_2_(F) \
1375  F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_CTORS_PRESENT, cl_bool) \
1376  F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_DTORS_PRESENT, cl_bool)
1377 
1378 #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
1379  F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
1380  F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
1381  F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
1382  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
1383  F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1384 
1385 #define CL_HPP_PARAM_NAME_CL_KHR_EXTENDED_VERSIONING_(F) \
1386  F(cl_platform_info, CL_PLATFORM_NUMERIC_VERSION_KHR, cl_version_khr) \
1387  F(cl_platform_info, CL_PLATFORM_EXTENSIONS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1388  \
1389  F(cl_device_info, CL_DEVICE_NUMERIC_VERSION_KHR, cl_version_khr) \
1390  F(cl_device_info, CL_DEVICE_OPENCL_C_NUMERIC_VERSION_KHR, cl_version_khr) \
1391  F(cl_device_info, CL_DEVICE_EXTENSIONS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1392  F(cl_device_info, CL_DEVICE_ILS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1393  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>)
1394 
1395 template <typename enum_type, cl_int Name>
1396 struct param_traits {};
1397 
1398 #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
1399 struct token; \
1400 template<> \
1401 struct param_traits<detail:: token,param_name> \
1402 { \
1403  enum { value = param_name }; \
1404  typedef T param_type; \
1405 };
1406 
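// Illustrative expansion (not part of the original header): applied to one row of the
// tables above, CL_HPP_DECLARE_PARAM_TRAITS_ produces a specialization such as
//
//     struct cl_device_info;
//     template<>
//     struct param_traits<detail::cl_device_info, CL_DEVICE_NAME>
//     {
//         enum { value = CL_DEVICE_NAME };
//         typedef string param_type;
//     };
//
// which is what allows, for example, Device::getInfo<CL_DEVICE_NAME>() to deduce that
// the query result is a string.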
1407 CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1408 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
1409 CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1410 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
1411 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1412 CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1413 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1414 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
1415 CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1416 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
1417 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
1418 CL_HPP_PARAM_NAME_INFO_2_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1419 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
1420 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
1421 CL_HPP_PARAM_NAME_INFO_2_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1422 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
1423 
1424 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1425 CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1426 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1427 
1428 #if defined(CL_HPP_USE_IL_KHR)
1429 CL_HPP_PARAM_NAME_INFO_IL_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1430 #endif // #if defined(CL_HPP_USE_IL_KHR)
1431 
1432 
1433 // Flags deprecated in OpenCL 2.0
1434 #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
1435  F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
1436 
1437 #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
1438  F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
1439 
1440 #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
1441  F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1442 
1443 // Include deprecated query flags based on versions
1444 // Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
1445 #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1446 CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1447 #endif // CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1448 #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1449 CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1450 #endif // CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1451 #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1452 CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1453 #endif // CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1454 
1455 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
1456 CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
1457 #endif // CL_HPP_USE_CL_DEVICE_FISSION
1458 
1459 #if defined(cl_khr_extended_versioning)
1460 CL_HPP_PARAM_NAME_CL_KHR_EXTENDED_VERSIONING_(CL_HPP_DECLARE_PARAM_TRAITS_);
1461 #endif // cl_khr_extended_versioning
1462 
1463 #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
1464 CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
1465 #endif
1466 
1467 #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
1468 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
1469 #endif
1470 
1471 #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
1472 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
1473 #endif
1474 #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
1475 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
1476 #endif
1477 #ifdef CL_DEVICE_SIMD_WIDTH_AMD
1478 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
1479 #endif
1480 #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
1481 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
1482 #endif
1483 #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
1484 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
1485 #endif
1486 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
1487 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
1488 #endif
1489 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
1490 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
1491 #endif
1492 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
1493 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
1494 #endif
1495 #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
1496 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
1497 #endif
1498 #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
1499 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
1500 #endif
1501 
1502 #ifdef CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM
1503 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM, cl_ulong)
1504 #endif
1505 #ifdef CL_DEVICE_JOB_SLOTS_ARM
1506 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_JOB_SLOTS_ARM, cl_uint)
1507 #endif
1508 
1509 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
1510 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
1511 #endif
1512 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
1513 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
1514 #endif
1515 #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
1516 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
1517 #endif
1518 #ifdef CL_DEVICE_WARP_SIZE_NV
1519 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
1520 #endif
1521 #ifdef CL_DEVICE_GPU_OVERLAP_NV
1522 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
1523 #endif
1524 #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
1525 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
1526 #endif
1527 #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
1528 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
1529 #endif
1530 
1531 // Convenience functions
1532 
1533 template <typename Func, typename T>
1534 inline cl_int
1535 getInfo(Func f, cl_uint name, T* param)
1536 {
1537  return getInfoHelper(f, name, param, 0);
1538 }
1539 
1540 template <typename Func, typename Arg0>
1541 struct GetInfoFunctor0
1542 {
1543  Func f_; const Arg0& arg0_;
1544  cl_int operator ()(
1545  cl_uint param, size_type size, void* value, size_type* size_ret)
1546  { return f_(arg0_, param, size, value, size_ret); }
1547 };
1548 
1549 template <typename Func, typename Arg0, typename Arg1>
1550 struct GetInfoFunctor1
1551 {
1552  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1553  cl_int operator ()(
1554  cl_uint param, size_type size, void* value, size_type* size_ret)
1555  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1556 };
1557 
1558 template <typename Func, typename Arg0, typename T>
1559 inline cl_int
1560 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1561 {
1562  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1563  return getInfoHelper(f0, name, param, 0);
1564 }
1565 
1566 template <typename Func, typename Arg0, typename Arg1, typename T>
1567 inline cl_int
1568 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1569 {
1570  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1571  return getInfoHelper(f0, name, param, 0);
1572 }
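// Illustrative sketch (not part of the original header): the functor structs above bind
// the leading object argument(s) so that every query funnels into the same
// getInfoHelper machinery; a device-name query proceeds roughly as
//
//     getInfo(&::clGetDeviceInfo, device, CL_DEVICE_NAME, &name)
//       -> GetInfoFunctor0<Func, cl_device_id> f0 = { f, device };
//       -> getInfoHelper(f0, CL_DEVICE_NAME, &name, 0)   // dispatches to the string overload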
1573 
1574 
1575 template<typename T>
1576 struct ReferenceHandler
1577 { };
1578 
1579 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1580 
1583 template <>
1584 struct ReferenceHandler<cl_device_id>
1585 {
1595  static cl_int retain(cl_device_id device)
1596  { return ::clRetainDevice(device); }
1606  static cl_int release(cl_device_id device)
1607  { return ::clReleaseDevice(device); }
1608 };
1609 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1610 
1613 template <>
1614 struct ReferenceHandler<cl_device_id>
1615 {
1616  // cl_device_id does not have retain().
1617  static cl_int retain(cl_device_id)
1618  { return CL_SUCCESS; }
1619  // cl_device_id does not have release().
1620  static cl_int release(cl_device_id)
1621  { return CL_SUCCESS; }
1622 };
1623 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1624 
1625 template <>
1626 struct ReferenceHandler<cl_platform_id>
1627 {
1628  // cl_platform_id does not have retain().
1629  static cl_int retain(cl_platform_id)
1630  { return CL_SUCCESS; }
1631  // cl_platform_id does not have release().
1632  static cl_int release(cl_platform_id)
1633  { return CL_SUCCESS; }
1634 };
1635 
1636 template <>
1637 struct ReferenceHandler<cl_context>
1638 {
1639  static cl_int retain(cl_context context)
1640  { return ::clRetainContext(context); }
1641  static cl_int release(cl_context context)
1642  { return ::clReleaseContext(context); }
1643 };
1644 
1645 template <>
1646 struct ReferenceHandler<cl_command_queue>
1647 {
1648  static cl_int retain(cl_command_queue queue)
1649  { return ::clRetainCommandQueue(queue); }
1650  static cl_int release(cl_command_queue queue)
1651  { return ::clReleaseCommandQueue(queue); }
1652 };
1653 
1654 template <>
1655 struct ReferenceHandler<cl_mem>
1656 {
1657  static cl_int retain(cl_mem memory)
1658  { return ::clRetainMemObject(memory); }
1659  static cl_int release(cl_mem memory)
1660  { return ::clReleaseMemObject(memory); }
1661 };
1662 
1663 template <>
1664 struct ReferenceHandler<cl_sampler>
1665 {
1666  static cl_int retain(cl_sampler sampler)
1667  { return ::clRetainSampler(sampler); }
1668  static cl_int release(cl_sampler sampler)
1669  { return ::clReleaseSampler(sampler); }
1670 };
1671 
1672 template <>
1673 struct ReferenceHandler<cl_program>
1674 {
1675  static cl_int retain(cl_program program)
1676  { return ::clRetainProgram(program); }
1677  static cl_int release(cl_program program)
1678  { return ::clReleaseProgram(program); }
1679 };
1680 
1681 template <>
1682 struct ReferenceHandler<cl_kernel>
1683 {
1684  static cl_int retain(cl_kernel kernel)
1685  { return ::clRetainKernel(kernel); }
1686  static cl_int release(cl_kernel kernel)
1687  { return ::clReleaseKernel(kernel); }
1688 };
1689 
1690 template <>
1691 struct ReferenceHandler<cl_event>
1692 {
1693  static cl_int retain(cl_event event)
1694  { return ::clRetainEvent(event); }
1695  static cl_int release(cl_event event)
1696  { return ::clReleaseEvent(event); }
1697 };
1698 
1699 
1700 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1701 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1702 static cl_uint getVersion(const vector<char> &versionInfo)
1703 {
1704  int highVersion = 0;
1705  int lowVersion = 0;
1706  int index = 7;
1707  while(versionInfo[index] != '.' ) {
1708  highVersion *= 10;
1709  highVersion += versionInfo[index]-'0';
1710  ++index;
1711  }
1712  ++index;
1713  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1714  lowVersion *= 10;
1715  lowVersion += versionInfo[index]-'0';
1716  ++index;
1717  }
1718  return (highVersion << 16) | lowVersion;
1719 }
1720 
1721 static cl_uint getPlatformVersion(cl_platform_id platform)
1722 {
1723  size_type size = 0;
1724  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1725 
1726  vector<char> versionInfo(size);
1727  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1728  return getVersion(versionInfo);
1729 }
1730 
1731 static cl_uint getDevicePlatformVersion(cl_device_id device)
1732 {
1733  cl_platform_id platform;
1734  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1735  return getPlatformVersion(platform);
1736 }
1737 
1738 static cl_uint getContextPlatformVersion(cl_context context)
1739 {
1740  // The platform cannot be queried from the context directly, so first grab a
1741  // device from the context and query that device's platform
1742  size_type size = 0;
1743  clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
1744  if (size == 0)
1745  return 0;
1746  vector<cl_device_id> devices(size/sizeof(cl_device_id));
1747  clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
1748  return getDevicePlatformVersion(devices[0]);
1749 }
1750 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
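// Illustrative example (not part of the original header): CL_PLATFORM_VERSION strings
// have the form "OpenCL <major>.<minor> <platform-specific information>", which is why
// parsing starts at index 7, just past "OpenCL ". For "OpenCL 1.2 CUDA" the helpers
// above return (1 << 16) | 2 == 0x00010002, the value later compared against the 1.1
// threshold in Wrapper<cl_device_id>::isReferenceCountable().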
1751 
1752 template <typename T>
1753 class Wrapper
1754 {
1755 public:
1756  typedef T cl_type;
1757 
1758 protected:
1759  cl_type object_;
1760 
1761 public:
1762  Wrapper() : object_(NULL) { }
1763 
1764  Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
1765  {
1766  if (retainObject) {
1767  detail::errHandler(retain(), __RETAIN_ERR);
1768  }
1769  }
1770 
1771  ~Wrapper()
1772  {
1773  if (object_ != NULL) { release(); }
1774  }
1775 
1776  Wrapper(const Wrapper<cl_type>& rhs)
1777  {
1778  object_ = rhs.object_;
1779  detail::errHandler(retain(), __RETAIN_ERR);
1780  }
1781 
1782  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1783  {
1784  object_ = rhs.object_;
1785  rhs.object_ = NULL;
1786  }
1787 
1788  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1789  {
1790  if (this != &rhs) {
1791  detail::errHandler(release(), __RELEASE_ERR);
1792  object_ = rhs.object_;
1793  detail::errHandler(retain(), __RETAIN_ERR);
1794  }
1795  return *this;
1796  }
1797 
1798  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1799  {
1800  if (this != &rhs) {
1801  detail::errHandler(release(), __RELEASE_ERR);
1802  object_ = rhs.object_;
1803  rhs.object_ = NULL;
1804  }
1805  return *this;
1806  }
1807 
1808  Wrapper<cl_type>& operator = (const cl_type &rhs)
1809  {
1810  detail::errHandler(release(), __RELEASE_ERR);
1811  object_ = rhs;
1812  return *this;
1813  }
1814 
1815  const cl_type& operator ()() const { return object_; }
1816 
1817  cl_type& operator ()() { return object_; }
1818 
1819  cl_type get() const { return object_; }
1820 
1821 protected:
1822  template<typename Func, typename U>
1823  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1824 
1825  cl_int retain() const
1826  {
1827  if (object_ != nullptr) {
1828  return ReferenceHandler<cl_type>::retain(object_);
1829  }
1830  else {
1831  return CL_SUCCESS;
1832  }
1833  }
1834 
1835  cl_int release() const
1836  {
1837  if (object_ != nullptr) {
1838  return ReferenceHandler<cl_type>::release(object_);
1839  }
1840  else {
1841  return CL_SUCCESS;
1842  }
1843  }
1844 };
1845 
1846 template <>
1847 class Wrapper<cl_device_id>
1848 {
1849 public:
1850  typedef cl_device_id cl_type;
1851 
1852 protected:
1853  cl_type object_;
1854  bool referenceCountable_;
1855 
1856  static bool isReferenceCountable(cl_device_id device)
1857  {
1858  bool retVal = false;
1859 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1860 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
1861  if (device != NULL) {
1862  int version = getDevicePlatformVersion(device);
1863  if(version > ((1 << 16) + 1)) {
1864  retVal = true;
1865  }
1866  }
1867 #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1868  retVal = true;
1869 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1870 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1871  return retVal;
1872  }
1873 
1874 public:
1875  Wrapper() : object_(NULL), referenceCountable_(false)
1876  {
1877  }
1878 
1879  Wrapper(const cl_type &obj, bool retainObject) :
1880  object_(obj),
1881  referenceCountable_(false)
1882  {
1883  referenceCountable_ = isReferenceCountable(obj);
1884 
1885  if (retainObject) {
1886  detail::errHandler(retain(), __RETAIN_ERR);
1887  }
1888  }
1889 
1890  ~Wrapper()
1891  {
1892  release();
1893  }
1894 
1895  Wrapper(const Wrapper<cl_type>& rhs)
1896  {
1897  object_ = rhs.object_;
1898  referenceCountable_ = isReferenceCountable(object_);
1899  detail::errHandler(retain(), __RETAIN_ERR);
1900  }
1901 
1902  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1903  {
1904  object_ = rhs.object_;
1905  referenceCountable_ = rhs.referenceCountable_;
1906  rhs.object_ = NULL;
1907  rhs.referenceCountable_ = false;
1908  }
1909 
1910  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1911  {
1912  if (this != &rhs) {
1913  detail::errHandler(release(), __RELEASE_ERR);
1914  object_ = rhs.object_;
1915  referenceCountable_ = rhs.referenceCountable_;
1916  detail::errHandler(retain(), __RETAIN_ERR);
1917  }
1918  return *this;
1919  }
1920 
1921  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1922  {
1923  if (this != &rhs) {
1924  detail::errHandler(release(), __RELEASE_ERR);
1925  object_ = rhs.object_;
1926  referenceCountable_ = rhs.referenceCountable_;
1927  rhs.object_ = NULL;
1928  rhs.referenceCountable_ = false;
1929  }
1930  return *this;
1931  }
1932 
1933  Wrapper<cl_type>& operator = (const cl_type &rhs)
1934  {
1935  detail::errHandler(release(), __RELEASE_ERR);
1936  object_ = rhs;
1937  referenceCountable_ = isReferenceCountable(object_);
1938  return *this;
1939  }
1940 
1941  const cl_type& operator ()() const { return object_; }
1942 
1943  cl_type& operator ()() { return object_; }
1944 
1945  cl_type get() const { return object_; }
1946 
1947 protected:
1948  template<typename Func, typename U>
1949  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1950 
1951  template<typename Func, typename U>
1952  friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
1953 
1954  cl_int retain() const
1955  {
1956  if( object_ != nullptr && referenceCountable_ ) {
1957  return ReferenceHandler<cl_type>::retain(object_);
1958  }
1959  else {
1960  return CL_SUCCESS;
1961  }
1962  }
1963 
1964  cl_int release() const
1965  {
1966  if (object_ != nullptr && referenceCountable_) {
1967  return ReferenceHandler<cl_type>::release(object_);
1968  }
1969  else {
1970  return CL_SUCCESS;
1971  }
1972  }
1973 };
1974 
1975 template <typename T>
1976 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1977 {
1978  return lhs() == rhs();
1979 }
1980 
1981 template <typename T>
1982 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1983 {
1984  return !operator==(lhs, rhs);
1985 }
1986 
1987 } // namespace detail
1989 
1990 
1991 using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1992 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1993 
1996 class BuildError : public Error
1997 {
1998 private:
1999  BuildLogType buildLogs;
2000 public:
2001  BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
2002  {
2003  }
2004 
2005  BuildLogType getBuildLog() const
2006  {
2007  return buildLogs;
2008  }
2009 };
2010 namespace detail {
2011  static inline cl_int buildErrHandler(
2012  cl_int err,
2013  const char * errStr,
2014  const BuildLogType &buildLogs)
2015  {
2016  if (err != CL_SUCCESS) {
2017  throw BuildError(err, errStr, buildLogs);
2018  }
2019  return err;
2020  }
2021 } // namespace detail
2022 
2023 #else
2024 namespace detail {
2025  static inline cl_int buildErrHandler(
2026  cl_int err,
2027  const char * errStr,
2028  const BuildLogType &buildLogs)
2029  {
2030  (void)buildLogs; // suppress unused variable warning
2031  (void)errStr;
2032  return err;
2033  }
2034 } // namespace detail
2035 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2036 
2037 
2043 struct ImageFormat : public cl_image_format
2044 {
2047  ImageFormat(){}
2049  ImageFormat(cl_channel_order order, cl_channel_type type)
2050  {
2051  image_channel_order = order;
2052  image_channel_data_type = type;
2053  }
2054 
2056  ImageFormat& operator = (const ImageFormat& rhs)
2057  {
2058  if (this != &rhs) {
2059  this->image_channel_data_type = rhs.image_channel_data_type;
2060  this->image_channel_order = rhs.image_channel_order;
2061  }
2062  return *this;
2063  }
2064 };
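#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro CL_HPP_ENABLE_DOC_EXAMPLES and the function name are hypothetical.
// Shows the intended use of ImageFormat: choose a channel order and a channel
// data type, here a common 8-bit-per-channel RGBA layout.
inline ImageFormat exampleMakeRGBA8Format()
{
    return ImageFormat(CL_RGBA, CL_UNSIGNED_INT8);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES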
2065 
2073 class Device : public detail::Wrapper<cl_device_id>
2074 {
2075 private:
2076  static std::once_flag default_initialized_;
2077  static Device default_;
2078  static cl_int default_error_;
2079 
2085  static void makeDefault();
2086 
2092  static void makeDefaultProvided(const Device &p) {
2093  default_ = p;
2094  }
2095 
2096 public:
2097 #ifdef CL_HPP_UNIT_TEST_ENABLE
2098 
2104  static void unitTestClearDefault() {
2105  default_ = Device();
2106  }
2107 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2108 
2110  Device() : detail::Wrapper<cl_type>() { }
2111 
2116  explicit Device(const cl_device_id &device, bool retainObject = false) :
2117  detail::Wrapper<cl_type>(device, retainObject) { }
2118 
2123  static Device getDefault(
2124  cl_int *errResult = NULL)
2125  {
2126  std::call_once(default_initialized_, makeDefault);
2127  detail::errHandler(default_error_);
2128  if (errResult != NULL) {
2129  *errResult = default_error_;
2130  }
2131  return default_;
2132  }
2133 
2141  static Device setDefault(const Device &default_device)
2142  {
2143  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
2144  detail::errHandler(default_error_);
2145  return default_;
2146  }
2147 
2152  Device& operator = (const cl_device_id& rhs)
2153  {
2154  detail::Wrapper<cl_type>::operator=(rhs);
2155  return *this;
2156  }
2157 
2161  Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2162 
2166  Device& operator = (const Device &dev)
2167  {
2168  detail::Wrapper<cl_type>::operator=(dev);
2169  return *this;
2170  }
2171 
2175  Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2176 
2180  Device& operator = (Device &&dev) CL_HPP_NOEXCEPT_
2181  {
2182  detail::Wrapper<cl_type>::operator=(std::move(dev));
2183  return *this;
2184  }
2185 
2187  template <typename T>
2188  cl_int getInfo(cl_device_info name, T* param) const
2189  {
2190  return detail::errHandler(
2191  detail::getInfo(&::clGetDeviceInfo, object_, name, param),
2192  __GET_DEVICE_INFO_ERR);
2193  }
2194 
2196  template <cl_device_info name> typename
2197  detail::param_traits<detail::cl_device_info, name>::param_type
2198  getInfo(cl_int* err = NULL) const
2199  {
2200  typename detail::param_traits<
2201  detail::cl_device_info, name>::param_type param;
2202  cl_int result = getInfo(name, &param);
2203  if (err != NULL) {
2204  *err = result;
2205  }
2206  return param;
2207  }
2208 
2209 
2210 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2211 
2217  cl_ulong getHostTimer(cl_int *error = nullptr)
2218  {
2219  cl_ulong retVal = 0;
2220  cl_int err =
2221  clGetHostTimer(this->get(), &retVal);
2222  detail::errHandler(
2223  err,
2224  __GET_HOST_TIMER_ERR);
2225  if (error) {
2226  *error = err;
2227  }
2228  return retVal;
2229  }
2230 
2241  std::pair<cl_ulong, cl_ulong> getDeviceAndHostTimer(cl_int *error = nullptr)
2242  {
2243  std::pair<cl_ulong, cl_ulong> retVal;
2244  cl_int err =
2245  clGetDeviceAndHostTimer(this->get(), &(retVal.first), &(retVal.second));
2246  detail::errHandler(
2247  err,
2248  __GET_DEVICE_AND_HOST_TIMER_ERR);
2249  if (error) {
2250  *error = err;
2251  }
2252  return retVal;
2253  }
2254 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2255 
2259 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2260  cl_int createSubDevices(
2262  const cl_device_partition_property * properties,
2263  vector<Device>* devices)
2264  {
2265  cl_uint n = 0;
2266  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2267  if (err != CL_SUCCESS) {
2268  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2269  }
2270 
2271  vector<cl_device_id> ids(n);
2272  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2273  if (err != CL_SUCCESS) {
2274  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2275  }
2276 
2277  // Cannot trivially assign because we need to capture intermediates
2278  // with safe construction
2279  if (devices) {
2280  devices->resize(ids.size());
2281 
2282  // Assign to param, constructing with retain behaviour
2283  // to correctly capture each underlying CL object
2284  for (size_type i = 0; i < ids.size(); i++) {
2285  // We do not need to retain because this device is being created
2286  // by the runtime
2287  (*devices)[i] = Device(ids[i], false);
2288  }
2289  }
2290 
2291  return CL_SUCCESS;
2292  }
2293 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2294 
2298  cl_int createSubDevices(
2299  const cl_device_partition_property_ext * properties,
2300  vector<Device>* devices)
2301  {
2302  typedef CL_API_ENTRY cl_int
2303  ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
2304  cl_device_id /*in_device*/,
2305  const cl_device_partition_property_ext * /* properties */,
2306  cl_uint /*num_entries*/,
2307  cl_device_id * /*out_devices*/,
2308  cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
2309 
2310  static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
2311  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
2312 
2313  cl_uint n = 0;
2314  cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
2315  if (err != CL_SUCCESS) {
2316  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2317  }
2318 
2319  vector<cl_device_id> ids(n);
2320  err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
2321  if (err != CL_SUCCESS) {
2322  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2323  }
2324  // Cannot trivially assign because we need to capture intermediates
2325  // with safe construction
2326  if (devices) {
2327  devices->resize(ids.size());
2328 
2329  // Assign to param, constructing with retain behaviour
2330  // to correctly capture each underlying CL object
2331  for (size_type i = 0; i < ids.size(); i++) {
2332  // We do not need to retain because this device is being created
2333  // by the runtime
2334  (*devices)[i] = Device(ids[i], false);
2335  }
2336  }
2337  return CL_SUCCESS;
2338  }
2339 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2340 };
2341 
2342 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
2343 CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
2344 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
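#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows the typical way to obtain
// the process-wide default Device and query it through the typed getInfo<>()
// interface. Assumes at least one OpenCL platform with a device is installed.
inline string exampleDefaultDeviceName()
{
    cl_int err = CL_SUCCESS;
    Device dev = Device::getDefault(&err);
    if (err != CL_SUCCESS) {
        return string();
    }
    // CL_DEVICE_NAME is mapped to cl::string by detail::param_traits
    return dev.getInfo<CL_DEVICE_NAME>(&err);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES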
2345 
2353 class Platform : public detail::Wrapper<cl_platform_id>
2354 {
2355 private:
2356  static std::once_flag default_initialized_;
2357  static Platform default_;
2358  static cl_int default_error_;
2359 
2365  static void makeDefault() {
2366  /* Throwing an exception from a call_once invocation does not do
2367  * what we wish, so we catch it and save the error.
2368  */
2369 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2370  try
2371 #endif
2372  {
2373  // If default wasn't passed, generate one
2374  // Otherwise set it
2375  cl_uint n = 0;
2376 
2377  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2378  if (err != CL_SUCCESS) {
2379  default_error_ = err;
2380  return;
2381  }
2382  if (n == 0) {
2383  default_error_ = CL_INVALID_PLATFORM;
2384  return;
2385  }
2386 
2387  vector<cl_platform_id> ids(n);
2388  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2389  if (err != CL_SUCCESS) {
2390  default_error_ = err;
2391  return;
2392  }
2393 
2394  default_ = Platform(ids[0]);
2395  }
2396 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2397  catch (cl::Error &e) {
2398  default_error_ = e.err();
2399  }
2400 #endif
2401  }
2402 
2408  static void makeDefaultProvided(const Platform &p) {
2409  default_ = p;
2410  }
2411 
2412 public:
2413 #ifdef CL_HPP_UNIT_TEST_ENABLE
2414 
2420  static void unitTestClearDefault() {
2421  default_ = Platform();
2422  }
2423 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2424 
2426  Platform() : detail::Wrapper<cl_type>() { }
2427 
2435  explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
2436  detail::Wrapper<cl_type>(platform, retainObject) { }
2437 
2442  Platform& operator = (const cl_platform_id& rhs)
2443  {
2444  detail::Wrapper<cl_type>::operator=(rhs);
2445  return *this;
2446  }
2447 
2448  static Platform getDefault(
2449  cl_int *errResult = NULL)
2450  {
2451  std::call_once(default_initialized_, makeDefault);
2452  detail::errHandler(default_error_);
2453  if (errResult != NULL) {
2454  *errResult = default_error_;
2455  }
2456  return default_;
2457  }
2458 
2466  static Platform setDefault(const Platform &default_platform)
2467  {
2468  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2469  detail::errHandler(default_error_);
2470  return default_;
2471  }
2472 
2474  template <typename T>
2475  cl_int getInfo(cl_platform_info name, T* param) const
2476  {
2477  return detail::errHandler(
2478  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2479  __GET_PLATFORM_INFO_ERR);
2480  }
2481 
2483  template <cl_platform_info name> typename
2484  detail::param_traits<detail::cl_platform_info, name>::param_type
2485  getInfo(cl_int* err = NULL) const
2486  {
2487  typename detail::param_traits<
2488  detail::cl_platform_info, name>::param_type param;
2489  cl_int result = getInfo(name, &param);
2490  if (err != NULL) {
2491  *err = result;
2492  }
2493  return param;
2494  }
2495 
2500  cl_int getDevices(
2501  cl_device_type type,
2502  vector<Device>* devices) const
2503  {
2504  cl_uint n = 0;
2505  if( devices == NULL ) {
2506  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2507  }
2508  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2509  if (err != CL_SUCCESS && err != CL_DEVICE_NOT_FOUND) {
2510  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2511  }
2512 
2513  vector<cl_device_id> ids(n);
2514  if (n>0) {
2515  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2516  if (err != CL_SUCCESS) {
2517  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2518  }
2519  }
2520 
2521  // Cannot trivially assign because we need to capture intermediates
2522  // with safe construction
2523  // We must retain things we obtain from the API to avoid releasing
2524  // API-owned objects.
2525  if (devices) {
2526  devices->resize(ids.size());
2527 
2528  // Assign to param, constructing with retain behaviour
2529  // to correctly capture each underlying CL object
2530  for (size_type i = 0; i < ids.size(); i++) {
2531  (*devices)[i] = Device(ids[i], true);
2532  }
2533  }
2534  return CL_SUCCESS;
2535  }
2536 
2537 #if defined(CL_HPP_USE_DX_INTEROP)
2538 
2561  cl_int getDevices(
2562  cl_d3d10_device_source_khr d3d_device_source,
2563  void * d3d_object,
2564  cl_d3d10_device_set_khr d3d_device_set,
2565  vector<Device>* devices) const
2566  {
2567  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2568  cl_platform_id platform,
2569  cl_d3d10_device_source_khr d3d_device_source,
2570  void * d3d_object,
2571  cl_d3d10_device_set_khr d3d_device_set,
2572  cl_uint num_entries,
2573  cl_device_id * devices,
2574  cl_uint* num_devices);
2575 
2576  if( devices == NULL ) {
2577  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2578  }
2579 
2580  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2581  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2582 
2583  cl_uint n = 0;
2584  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2585  object_,
2586  d3d_device_source,
2587  d3d_object,
2588  d3d_device_set,
2589  0,
2590  NULL,
2591  &n);
2592  if (err != CL_SUCCESS) {
2593  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2594  }
2595 
2596  vector<cl_device_id> ids(n);
2597  err = pfn_clGetDeviceIDsFromD3D10KHR(
2598  object_,
2599  d3d_device_source,
2600  d3d_object,
2601  d3d_device_set,
2602  n,
2603  ids.data(),
2604  NULL);
2605  if (err != CL_SUCCESS) {
2606  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2607  }
2608 
2609  // Cannot trivially assign because we need to capture intermediates
2610  // with safe construction
2611  // We must retain things we obtain from the API to avoid releasing
2612  // API-owned objects.
2613  if (devices) {
2614  devices->resize(ids.size());
2615 
2616  // Assign to param, constructing with retain behaviour
2617  // to correctly capture each underlying CL object
2618  for (size_type i = 0; i < ids.size(); i++) {
2619  (*devices)[i] = Device(ids[i], true);
2620  }
2621  }
2622  return CL_SUCCESS;
2623  }
2624 #endif
2625 
2630  static cl_int get(
2631  vector<Platform>* platforms)
2632  {
2633  cl_uint n = 0;
2634 
2635  if( platforms == NULL ) {
2636  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2637  }
2638 
2639  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2640  if (err != CL_SUCCESS) {
2641  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2642  }
2643 
2644  vector<cl_platform_id> ids(n);
2645  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2646  if (err != CL_SUCCESS) {
2647  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2648  }
2649 
2650  if (platforms) {
2651  platforms->resize(ids.size());
2652 
2653  // Platforms don't reference count
2654  for (size_type i = 0; i < ids.size(); i++) {
2655  (*platforms)[i] = Platform(ids[i]);
2656  }
2657  }
2658  return CL_SUCCESS;
2659  }
2660 
2665  static cl_int get(
2666  Platform * platform)
2667  {
2668  cl_int err;
2669  Platform default_platform = Platform::getDefault(&err);
2670  if (platform) {
2671  *platform = default_platform;
2672  }
2673  return err;
2674  }
2675 
2684  static Platform get(
2685  cl_int * errResult = NULL)
2686  {
2687  cl_int err;
2688  Platform default_platform = Platform::getDefault(&err);
2689  if (errResult) {
2690  *errResult = err;
2691  }
2692  return default_platform;
2693  }
2694 
2695 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2696  cl_int
2697  unloadCompiler()
2699  {
2700  return ::clUnloadPlatformCompiler(object_);
2701  }
2702 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2703 }; // class Platform
2704 
2705 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2706 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2707 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
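#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows the usual discovery
// pattern: enumerate platforms with Platform::get() and then list the GPU
// devices of each with getDevices(). Returns the total number found.
inline cl_uint exampleCountGPUDevices()
{
    vector<Platform> platforms;
    if (Platform::get(&platforms) != CL_SUCCESS) {
        return 0;
    }
    cl_uint total = 0;
    for (const Platform &p : platforms) {
        vector<Device> devices;
        // A platform with no GPUs reports CL_DEVICE_NOT_FOUND internally,
        // which getDevices() does not treat as an error
        if (p.getDevices(CL_DEVICE_TYPE_GPU, &devices) == CL_SUCCESS) {
            total += static_cast<cl_uint>(devices.size());
        }
    }
    return total;
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES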
2708 
2709 
2713 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2714 
2718 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2719 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2720 inline cl_int
2721  UnloadCompiler()
2722  {
2723  return ::clUnloadCompiler();
2724 }
2725 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2726 
2735 class Context
2736  : public detail::Wrapper<cl_context>
2737 {
2738 private:
2739  static std::once_flag default_initialized_;
2740  static Context default_;
2741  static cl_int default_error_;
2742 
2748  static void makeDefault() {
2749  /* Throwing an exception from a call_once invocation does not do
2750  * what we wish, so we catch it and save the error.
2751  */
2752 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2753  try
2754 #endif
2755  {
2756 #if !defined(__APPLE__) && !defined(__MACOS)
2757  const Platform &p = Platform::getDefault();
2758  cl_platform_id defaultPlatform = p();
2759  cl_context_properties properties[3] = {
2760  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2761  };
2762 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2763  cl_context_properties *properties = nullptr;
2764 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2765 
2766  default_ = Context(
2767  CL_DEVICE_TYPE_DEFAULT,
2768  properties,
2769  NULL,
2770  NULL,
2771  &default_error_);
2772  }
2773 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2774  catch (cl::Error &e) {
2775  default_error_ = e.err();
2776  }
2777 #endif
2778  }
2779 
2780 
2786  static void makeDefaultProvided(const Context &c) {
2787  default_ = c;
2788  }
2789 
2790 public:
2791 #ifdef CL_HPP_UNIT_TEST_ENABLE
2792 
2798  static void unitTestClearDefault() {
2799  default_ = Context();
2800  }
2801 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2802 
2807  Context(
2808  const vector<Device>& devices,
2809  cl_context_properties* properties = NULL,
2810  void (CL_CALLBACK * notifyFptr)(
2811  const char *,
2812  const void *,
2813  size_type,
2814  void *) = NULL,
2815  void* data = NULL,
2816  cl_int* err = NULL)
2817  {
2818  cl_int error;
2819 
2820  size_type numDevices = devices.size();
2821  vector<cl_device_id> deviceIDs(numDevices);
2822 
2823  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2824  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2825  }
2826 
2827  object_ = ::clCreateContext(
2828  properties, (cl_uint) numDevices,
2829  deviceIDs.data(),
2830  notifyFptr, data, &error);
2831 
2832  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2833  if (err != NULL) {
2834  *err = error;
2835  }
2836  }
2837 
2838  Context(
2839  const Device& device,
2840  cl_context_properties* properties = NULL,
2841  void (CL_CALLBACK * notifyFptr)(
2842  const char *,
2843  const void *,
2844  size_type,
2845  void *) = NULL,
2846  void* data = NULL,
2847  cl_int* err = NULL)
2848  {
2849  cl_int error;
2850 
2851  cl_device_id deviceID = device();
2852 
2853  object_ = ::clCreateContext(
2854  properties, 1,
2855  &deviceID,
2856  notifyFptr, data, &error);
2857 
2858  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2859  if (err != NULL) {
2860  *err = error;
2861  }
2862  }
2863 
2868  Context(
2869  cl_device_type type,
2870  cl_context_properties* properties = NULL,
2871  void (CL_CALLBACK * notifyFptr)(
2872  const char *,
2873  const void *,
2874  size_type,
2875  void *) = NULL,
2876  void* data = NULL,
2877  cl_int* err = NULL)
2878  {
2879  cl_int error;
2880 
2881 #if !defined(__APPLE__) && !defined(__MACOS)
2882  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2883 
2884  if (properties == NULL) {
2885  // Get a valid platform ID as we cannot send in a blank one
2886  vector<Platform> platforms;
2887  error = Platform::get(&platforms);
2888  if (error != CL_SUCCESS) {
2889  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2890  if (err != NULL) {
2891  *err = error;
2892  }
2893  return;
2894  }
2895 
2896  // Check the platforms we found for a device of our specified type
2897  cl_context_properties platform_id = 0;
2898  for (unsigned int i = 0; i < platforms.size(); i++) {
2899 
2900  vector<Device> devices;
2901 
2902 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2903  try {
2904 #endif
2905 
2906  error = platforms[i].getDevices(type, &devices);
2907 
2908 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2909  } catch (cl::Error& e) {
2910  error = e.err();
2911  }
2912  // Caught here when exceptions are enabled, as we don't want to give up just because
2913  // the first platform has no devices of the requested type; the error is checked below and can be rethrown there if needed
2914 #endif
2915 
2916  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2917  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2918  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2919  if (err != NULL) {
2920  *err = error;
2921  }
2922  }
2923 
2924  if (devices.size() > 0) {
2925  platform_id = (cl_context_properties)platforms[i]();
2926  break;
2927  }
2928  }
2929 
2930  if (platform_id == 0) {
2931  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2932  if (err != NULL) {
2933  *err = CL_DEVICE_NOT_FOUND;
2934  }
2935  return;
2936  }
2937 
2938  prop[1] = platform_id;
2939  properties = &prop[0];
2940  }
2941 #endif
2942  object_ = ::clCreateContextFromType(
2943  properties, type, notifyFptr, data, &error);
2944 
2945  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2946  if (err != NULL) {
2947  *err = error;
2948  }
2949  }
2950 
2954  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2955 
2959  Context& operator = (const Context& ctx)
2960  {
2961  detail::Wrapper<cl_type>::operator=(ctx);
2962  return *this;
2963  }
2964 
2968  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2969 
2973  Context& operator = (Context&& ctx) CL_HPP_NOEXCEPT_
2974  {
2975  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2976  return *this;
2977  }
2978 
2979 
2984  static Context getDefault(cl_int * err = NULL)
2985  {
2986  std::call_once(default_initialized_, makeDefault);
2987  detail::errHandler(default_error_);
2988  if (err != NULL) {
2989  *err = default_error_;
2990  }
2991  return default_;
2992  }
2993 
3001  static Context setDefault(const Context &default_context)
3002  {
3003  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
3004  detail::errHandler(default_error_);
3005  return default_;
3006  }
3007 
3009  Context() : detail::Wrapper<cl_type>() { }
3010 
3016  explicit Context(const cl_context& context, bool retainObject = false) :
3017  detail::Wrapper<cl_type>(context, retainObject) { }
3018 
3024  Context& operator = (const cl_context& rhs)
3025  {
3026  detail::Wrapper<cl_type>::operator=(rhs);
3027  return *this;
3028  }
3029 
3031  template <typename T>
3032  cl_int getInfo(cl_context_info name, T* param) const
3033  {
3034  return detail::errHandler(
3035  detail::getInfo(&::clGetContextInfo, object_, name, param),
3036  __GET_CONTEXT_INFO_ERR);
3037  }
3038 
3040  template <cl_context_info name> typename
3041  detail::param_traits<detail::cl_context_info, name>::param_type
3042  getInfo(cl_int* err = NULL) const
3043  {
3044  typename detail::param_traits<
3045  detail::cl_context_info, name>::param_type param;
3046  cl_int result = getInfo(name, &param);
3047  if (err != NULL) {
3048  *err = result;
3049  }
3050  return param;
3051  }
3052 
3057  cl_int getSupportedImageFormats(
3058  cl_mem_flags flags,
3059  cl_mem_object_type type,
3060  vector<ImageFormat>* formats) const
3061  {
3062  cl_uint numEntries;
3063 
3064  if (!formats) {
3065  return CL_SUCCESS;
3066  }
3067 
3068  cl_int err = ::clGetSupportedImageFormats(
3069  object_,
3070  flags,
3071  type,
3072  0,
3073  NULL,
3074  &numEntries);
3075  if (err != CL_SUCCESS) {
3076  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3077  }
3078 
3079  if (numEntries > 0) {
3080  vector<ImageFormat> value(numEntries);
3081  err = ::clGetSupportedImageFormats(
3082  object_,
3083  flags,
3084  type,
3085  numEntries,
3086  (cl_image_format*)value.data(),
3087  NULL);
3088  if (err != CL_SUCCESS) {
3089  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3090  }
3091 
3092  formats->assign(begin(value), end(value));
3093  }
3094  else {
3095  // If no values are being returned, ensure an empty vector comes back
3096  formats->clear();
3097  }
3098 
3099  return CL_SUCCESS;
3100  }
3101 };
3102 
3103 inline void Device::makeDefault()
3104 {
3105  /* Throwing an exception from a call_once invocation does not do
3106  * what we wish, so we catch it and save the error.
3107  */
3108 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3109  try
3110 #endif
3111  {
3112  cl_int error = 0;
3113 
3114  Context context = Context::getDefault(&error);
3115  detail::errHandler(error, __CREATE_CONTEXT_ERR);
3116 
3117  if (error != CL_SUCCESS) {
3118  default_error_ = error;
3119  }
3120  else {
3121  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
3122  default_error_ = CL_SUCCESS;
3123  }
3124  }
3125 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3126  catch (cl::Error &e) {
3127  default_error_ = e.err();
3128  }
3129 #endif
3130 }
3131 
3132 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
3133 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
3134 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
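#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows construction of a Context
// from a device type (letting the constructor find a suitable platform) and
// retrieval of the devices the context ended up containing.
inline vector<Device> exampleContextDevices()
{
    cl_int err = CL_SUCCESS;
    Context context(CL_DEVICE_TYPE_DEFAULT, NULL, NULL, NULL, &err);
    if (err != CL_SUCCESS) {
        return vector<Device>();
    }
    return context.getInfo<CL_CONTEXT_DEVICES>(&err);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES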
3135 
3144 class Event : public detail::Wrapper<cl_event>
3145 {
3146 public:
3148  Event() : detail::Wrapper<cl_type>() { }
3149 
3158  explicit Event(const cl_event& event, bool retainObject = false) :
3159  detail::Wrapper<cl_type>(event, retainObject) { }
3160 
3166  Event& operator = (const cl_event& rhs)
3167  {
3168  detail::Wrapper<cl_type>::operator=(rhs);
3169  return *this;
3170  }
3171 
3173  template <typename T>
3174  cl_int getInfo(cl_event_info name, T* param) const
3175  {
3176  return detail::errHandler(
3177  detail::getInfo(&::clGetEventInfo, object_, name, param),
3178  __GET_EVENT_INFO_ERR);
3179  }
3180 
3182  template <cl_event_info name> typename
3183  detail::param_traits<detail::cl_event_info, name>::param_type
3184  getInfo(cl_int* err = NULL) const
3185  {
3186  typename detail::param_traits<
3187  detail::cl_event_info, name>::param_type param;
3188  cl_int result = getInfo(name, &param);
3189  if (err != NULL) {
3190  *err = result;
3191  }
3192  return param;
3193  }
3194 
3196  template <typename T>
3197  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3198  {
3199  return detail::errHandler(detail::getInfo(
3200  &::clGetEventProfilingInfo, object_, name, param),
3201  __GET_EVENT_PROFILE_INFO_ERR);
3202  }
3203 
3205  template <cl_profiling_info name> typename
3206  detail::param_traits<detail::cl_profiling_info, name>::param_type
3207  getProfilingInfo(cl_int* err = NULL) const
3208  {
3209  typename detail::param_traits<
3210  detail::cl_profiling_info, name>::param_type param;
3211  cl_int result = getProfilingInfo(name, &param);
3212  if (err != NULL) {
3213  *err = result;
3214  }
3215  return param;
3216  }
3217 
3222  cl_int wait() const
3223  {
3224  return detail::errHandler(
3225  ::clWaitForEvents(1, &object_),
3226  __WAIT_FOR_EVENTS_ERR);
3227  }
3228 
3229 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3230 
3234  cl_int setCallback(
3235  cl_int type,
3236  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3237  void * user_data = NULL)
3238  {
3239  return detail::errHandler(
3240  ::clSetEventCallback(
3241  object_,
3242  type,
3243  pfn_notify,
3244  user_data),
3245  __SET_EVENT_CALLBACK_ERR);
3246  }
3247 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3248 
3253  static cl_int
3254  waitForEvents(const vector<Event>& events)
3255  {
3256  return detail::errHandler(
3257  ::clWaitForEvents(
3258  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3259  __WAIT_FOR_EVENTS_ERR);
3260  }
3261 };
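#if defined(CL_HPP_ENABLE_DOC_EXAMPLES) && CL_HPP_TARGET_OPENCL_VERSION >= 110
// Editor's illustrative sketch, not part of the original header; the guard
// macro and the function names are hypothetical. Shows the callback signature
// expected by Event::setCallback() and how to register it for completion of
// an event that is assumed to have been returned by an enqueue call.
inline void CL_CALLBACK exampleEventNotify(cl_event, cl_int status, void *)
{
    // Invoked by the runtime when the event reaches CL_COMPLETE, or with a
    // negative status if the associated command terminated abnormally
    (void)status;
}

inline cl_int exampleRegisterCompletionCallback(Event &ev)
{
    return ev.setCallback(CL_COMPLETE, &exampleEventNotify, NULL);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES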
3262 
3263 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3264 
3268 class UserEvent : public Event
3269 {
3270 public:
3275  UserEvent(
3276  const Context& context,
3277  cl_int * err = NULL)
3278  {
3279  cl_int error;
3280  object_ = ::clCreateUserEvent(
3281  context(),
3282  &error);
3283 
3284  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3285  if (err != NULL) {
3286  *err = error;
3287  }
3288  }
3289 
3291  UserEvent() : Event() { }
3292 
3297  cl_int setStatus(cl_int status)
3298  {
3299  return detail::errHandler(
3300  ::clSetUserEventStatus(object_,status),
3301  __SET_USER_EVENT_STATUS_ERR);
3302  }
3303 };
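#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. A UserEvent lets host code gate
// device work: commands enqueued with this event in their wait list do not
// start until setStatus(CL_COMPLETE) is called.
inline cl_int exampleSignalUserEvent(const Context &context)
{
    cl_int err = CL_SUCCESS;
    UserEvent gate(context, &err);
    if (err != CL_SUCCESS) {
        return err;
    }
    // ... enqueue commands that list 'gate' in their wait list here ...
    return gate.setStatus(CL_COMPLETE);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES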
3304 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3305 
3310 inline static cl_int
3311 WaitForEvents(const vector<Event>& events)
3312 {
3313  return detail::errHandler(
3314  ::clWaitForEvents(
3315  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3316  __WAIT_FOR_EVENTS_ERR);
3317 }
3318 
3327 class Memory : public detail::Wrapper<cl_mem>
3328 {
3329 public:
3331  Memory() : detail::Wrapper<cl_type>() { }
3332 
3344  explicit Memory(const cl_mem& memory, bool retainObject) :
3345  detail::Wrapper<cl_type>(memory, retainObject) { }
3346 
3352  Memory& operator = (const cl_mem& rhs)
3353  {
3354  detail::Wrapper<cl_type>::operator=(rhs);
3355  return *this;
3356  }
3357 
3361  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3362 
3366  Memory& operator = (const Memory &mem)
3367  {
3368  detail::Wrapper<cl_type>::operator=(mem);
3369  return *this;
3370  }
3371 
3375  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3376 
3380  Memory& operator = (Memory &&mem) CL_HPP_NOEXCEPT_
3381  {
3382  detail::Wrapper<cl_type>::operator=(std::move(mem));
3383  return *this;
3384  }
3385 
3386 
3388  template <typename T>
3389  cl_int getInfo(cl_mem_info name, T* param) const
3390  {
3391  return detail::errHandler(
3392  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3393  __GET_MEM_OBJECT_INFO_ERR);
3394  }
3395 
3397  template <cl_mem_info name> typename
3398  detail::param_traits<detail::cl_mem_info, name>::param_type
3399  getInfo(cl_int* err = NULL) const
3400  {
3401  typename detail::param_traits<
3402  detail::cl_mem_info, name>::param_type param;
3403  cl_int result = getInfo(name, &param);
3404  if (err != NULL) {
3405  *err = result;
3406  }
3407  return param;
3408  }
3409 
3410 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3411 
3424  cl_int setDestructorCallback(
3425  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3426  void * user_data = NULL)
3427  {
3428  return detail::errHandler(
3429  ::clSetMemObjectDestructorCallback(
3430  object_,
3431  pfn_notify,
3432  user_data),
3433  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3434  }
3435 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3436 
3437 };
3438 
3439 // Pre-declare copy functions
3440 class Buffer;
3441 template< typename IteratorType >
3442 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3443 template< typename IteratorType >
3444 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3445 template< typename IteratorType >
3446 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3447 template< typename IteratorType >
3448 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3449 
3450 
3451 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3452 namespace detail
3453 {
3454  class SVMTraitNull
3455  {
3456  public:
3457  static cl_svm_mem_flags getSVMMemFlags()
3458  {
3459  return 0;
3460  }
3461  };
3462 } // namespace detail
3463 
3464 template<class Trait = detail::SVMTraitNull>
3465  class SVMTraitReadWrite
3466  {
3467 public:
3468  static cl_svm_mem_flags getSVMMemFlags()
3469  {
3470  return CL_MEM_READ_WRITE |
3471  Trait::getSVMMemFlags();
3472  }
3473 };
3474 
3475 template<class Trait = detail::SVMTraitNull>
3476  class SVMTraitReadOnly
3477  {
3478 public:
3479  static cl_svm_mem_flags getSVMMemFlags()
3480  {
3481  return CL_MEM_READ_ONLY |
3482  Trait::getSVMMemFlags();
3483  }
3484 };
3485 
3486 template<class Trait = detail::SVMTraitNull>
3487  class SVMTraitWriteOnly
3488  {
3489 public:
3490  static cl_svm_mem_flags getSVMMemFlags()
3491  {
3492  return CL_MEM_WRITE_ONLY |
3493  Trait::getSVMMemFlags();
3494  }
3495 };
3496 
3497 template<class Trait = SVMTraitReadWrite<>>
3498  class SVMTraitCoarse
3499  {
3500 public:
3501  static cl_svm_mem_flags getSVMMemFlags()
3502  {
3503  return Trait::getSVMMemFlags();
3504  }
3505 };
3506 
3507 template<class Trait = SVMTraitReadWrite<>>
3508  class SVMTraitFine
3509  {
3510 public:
3511  static cl_svm_mem_flags getSVMMemFlags()
3512  {
3513  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3514  Trait::getSVMMemFlags();
3515  }
3516 };
3517 
3518 template<class Trait = SVMTraitReadWrite<>>
3519  class SVMTraitAtomic
3520  {
3521 public:
3522  static cl_svm_mem_flags getSVMMemFlags()
3523  {
3524  return
3525  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3526  CL_MEM_SVM_ATOMICS |
3527  Trait::getSVMMemFlags();
3528  }
3529 };
3530 
3531 // Pre-declare SVM map function
3532 template<typename T>
3533 inline cl_int enqueueMapSVM(
3534  T* ptr,
3535  cl_bool blocking,
3536  cl_map_flags flags,
3537  size_type size,
3538  const vector<Event>* events = NULL,
3539  Event* event = NULL);
3540 
3552 template<typename T, class SVMTrait>
3553  class SVMAllocator {
3554  private:
3555  Context context_;
3556 
3557 public:
3558  typedef T value_type;
3559  typedef value_type* pointer;
3560  typedef const value_type* const_pointer;
3561  typedef value_type& reference;
3562  typedef const value_type& const_reference;
3563  typedef std::size_t size_type;
3564  typedef std::ptrdiff_t difference_type;
3565 
3566  template<typename U>
3567  struct rebind
3568  {
3569  typedef SVMAllocator<U, SVMTrait> other;
3570  };
3571 
3572  template<typename U, typename V>
3573  friend class SVMAllocator;
3574 
3575  SVMAllocator() :
3576  context_(Context::getDefault())
3577  {
3578  }
3579 
3580  explicit SVMAllocator(cl::Context context) :
3581  context_(context)
3582  {
3583  }
3584 
3585 
3586  SVMAllocator(const SVMAllocator &other) :
3587  context_(other.context_)
3588  {
3589  }
3590 
3591  template<typename U>
3592  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3593  context_(other.context_)
3594  {
3595  }
3596 
3597  ~SVMAllocator()
3598  {
3599  }
3600 
3601  pointer address(reference r) CL_HPP_NOEXCEPT_
3602  {
3603  return std::addressof(r);
3604  }
3605 
3606  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3607  {
3608  return std::addressof(r);
3609  }
3610 
3617  pointer allocate(
3618  size_type size,
3619  typename cl::SVMAllocator<void, SVMTrait>::const_pointer = 0)
3620  {
3621  // Allocate memory with default alignment matching the size of the type
3622  void* voidPointer =
3623  clSVMAlloc(
3624  context_(),
3625  SVMTrait::getSVMMemFlags(),
3626  size*sizeof(T),
3627  0);
3628  pointer retValue = reinterpret_cast<pointer>(
3629  voidPointer);
3630 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3631  if (!retValue) {
3632  std::bad_alloc excep;
3633  throw excep;
3634  }
3635 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3636 
3637  // If allocation was coarse-grained then map it
3638  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3639  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3640  if (err != CL_SUCCESS) {
3641  std::bad_alloc excep;
3642  throw excep;
3643  }
3644  }
3645 
3646  // If exceptions disabled, return null pointer from allocator
3647  return retValue;
3648  }
3649 
3650  void deallocate(pointer p, size_type)
3651  {
3652  clSVMFree(context_(), p);
3653  }
3654 
3659  size_type max_size() const CL_HPP_NOEXCEPT_
3660  {
3661  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3662 
3663  for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3664  maxSize = std::min(
3665  maxSize,
3666  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3667  }
3668 
3669  return maxSize;
3670  }
3671 
3672  template< class U, class... Args >
3673  void construct(U* p, Args&&... args)
3674  {
3675  new(p)T(args...);
3676  }
3677 
3678  template< class U >
3679  void destroy(U* p)
3680  {
3681  p->~U();
3682  }
3683 
3687  inline bool operator==(SVMAllocator const& rhs)
3688  {
3689  return (context_==rhs.context_);
3690  }
3691 
3692  inline bool operator!=(SVMAllocator const& a)
3693  {
3694  return !operator==(a);
3695  }
3696 }; // class SVMAllocator
3697 
3698 
3699 template<class SVMTrait>
3700 class SVMAllocator<void, SVMTrait> {
3701 public:
3702  typedef void value_type;
3703  typedef value_type* pointer;
3704  typedef const value_type* const_pointer;
3705 
3706  template<typename U>
3707  struct rebind
3708  {
3709  typedef SVMAllocator<U, SVMTrait> other;
3710  };
3711 
3712  template<typename U, typename V>
3713  friend class SVMAllocator;
3714 };
3715 
3716 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3717 namespace detail
3718 {
3719  template<class Alloc>
3720  class Deleter {
3721  private:
3722  Alloc alloc_;
3723  size_type copies_;
3724 
3725  public:
3726  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3727 
3728  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3729  {
3730  }
3731 
3732  void operator()(pointer ptr) const {
3733  Alloc tmpAlloc{ alloc_ };
3734  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3735  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3736  }
3737  };
3738 } // namespace detail
3739 
3746 template <class T, class Alloc, class... Args>
3747 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3748 {
3749  Alloc alloc(alloc_);
3750  static const size_type copies = 1;
3751 
3752  // Ensure that creation of the management block and the
3753  // object are dealt with separately such that we only provide a deleter
3754 
3755  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3756  if (!tmp) {
3757  std::bad_alloc excep;
3758  throw excep;
3759  }
3760  try {
3761  std::allocator_traits<Alloc>::construct(
3762  alloc,
3763  std::addressof(*tmp),
3764  std::forward<Args>(args)...);
3765 
3766  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3767  }
3768  catch (std::bad_alloc& b)
3769  {
3770  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3771  throw;
3772  }
3773 }
3774 
3775 template< class T, class SVMTrait, class... Args >
3776 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3777 {
3778  SVMAllocator<T, SVMTrait> alloc;
3779  return cl::allocate_pointer<T>(alloc, args...);
3780 }
3781 
3782 template< class T, class SVMTrait, class... Args >
3783 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3784 {
3785  SVMAllocator<T, SVMTrait> alloc(c);
3786  return cl::allocate_pointer<T>(alloc, args...);
3787 }
3788 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3789 
3793 template < class T >
3794 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3795 
3799 template < class T >
3800 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3801 
3805 template < class T >
3806 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3807 
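#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows a coarse-grained SVM
// container: the allocator obtains SVM storage from the given context, and
// for coarse-grained SVM SVMAllocator::allocate() also maps the allocation
// for host access (see above).
inline coarse_svm_vector<int> exampleCoarseSVMVector(const Context &context, size_type count)
{
    SVMAllocator<int, SVMTraitCoarse<>> svmAlloc(context);
    return coarse_svm_vector<int>(count, 0, svmAlloc);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES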
3808 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3809 
3810 
3817 class Buffer : public Memory
3818 {
3819 public:
3820 
3828  Buffer(
3829  const Context& context,
3830  cl_mem_flags flags,
3831  size_type size,
3832  void* host_ptr = NULL,
3833  cl_int* err = NULL)
3834  {
3835  cl_int error;
3836  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3837 
3838  detail::errHandler(error, __CREATE_BUFFER_ERR);
3839  if (err != NULL) {
3840  *err = error;
3841  }
3842  }
3843 
3853  Buffer(
3854  cl_mem_flags flags,
3855  size_type size,
3856  void* host_ptr = NULL,
3857  cl_int* err = NULL)
3858  {
3859  cl_int error;
3860 
3861  Context context = Context::getDefault(err);
3862 
3863  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3864 
3865  detail::errHandler(error, __CREATE_BUFFER_ERR);
3866  if (err != NULL) {
3867  *err = error;
3868  }
3869  }
3870 
3876  template< typename IteratorType >
3877  Buffer(
3878  IteratorType startIterator,
3879  IteratorType endIterator,
3880  bool readOnly,
3881  bool useHostPtr = false,
3882  cl_int* err = NULL)
3883  {
3884  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3885  cl_int error;
3886 
3887  cl_mem_flags flags = 0;
3888  if( readOnly ) {
3889  flags |= CL_MEM_READ_ONLY;
3890  }
3891  else {
3892  flags |= CL_MEM_READ_WRITE;
3893  }
3894  if( useHostPtr ) {
3895  flags |= CL_MEM_USE_HOST_PTR;
3896  }
3897 
3898  size_type size = sizeof(DataType)*(endIterator - startIterator);
3899 
3900  Context context = Context::getDefault(err);
3901 
3902  if( useHostPtr ) {
3903  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3904  } else {
3905  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3906  }
3907 
3908  detail::errHandler(error, __CREATE_BUFFER_ERR);
3909  if (err != NULL) {
3910  *err = error;
3911  }
3912 
3913  if( !useHostPtr ) {
3914  error = cl::copy(startIterator, endIterator, *this);
3915  detail::errHandler(error, __CREATE_BUFFER_ERR);
3916  if (err != NULL) {
3917  *err = error;
3918  }
3919  }
3920  }
3921 
3927  template< typename IteratorType >
3928  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3929  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3930 
3935  template< typename IteratorType >
3936  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3937  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3938 
3940  Buffer() : Memory() { }
3941 
3949  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3950  Memory(buffer, retainObject) { }
3951 
3956  Buffer& operator = (const cl_mem& rhs)
3957  {
3958  Memory::operator=(rhs);
3959  return *this;
3960  }
3961 
3965  Buffer(const Buffer& buf) : Memory(buf) {}
3966 
3970  Buffer& operator = (const Buffer &buf)
3971  {
3972  Memory::operator=(buf);
3973  return *this;
3974  }
3975 
3979  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3980 
3984  Buffer& operator = (Buffer &&buf) CL_HPP_NOEXCEPT_
3985  {
3986  Memory::operator=(std::move(buf));
3987  return *this;
3988  }
3989 
3990 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3991 
3995  Buffer createSubBuffer(
3996  cl_mem_flags flags,
3997  cl_buffer_create_type buffer_create_type,
3998  const void * buffer_create_info,
3999  cl_int * err = NULL)
4000  {
4001  Buffer result;
4002  cl_int error;
4003  result.object_ = ::clCreateSubBuffer(
4004  object_,
4005  flags,
4006  buffer_create_type,
4007  buffer_create_info,
4008  &error);
4009 
4010  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
4011  if (err != NULL) {
4012  *err = error;
4013  }
4014 
4015  return result;
4016  }
4017 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
4018 };
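#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows the iterator-based Buffer
// constructor: the buffer is sized from the range, created in the default
// context, and (because useHostPtr is false) filled via cl::copy().
inline Buffer exampleBufferFromHostData(vector<float> &hostData, cl_int *err = NULL)
{
    // readOnly = true requests a CL_MEM_READ_ONLY buffer
    return Buffer(hostData.begin(), hostData.end(), true, false, err);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES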
4019 
4020 #if defined (CL_HPP_USE_DX_INTEROP)
4021 
4029 class BufferD3D10 : public Buffer
4030 {
4031 public:
4032 
4033 
4039  BufferD3D10(
4040  const Context& context,
4041  cl_mem_flags flags,
4042  ID3D10Buffer* bufobj,
4043  cl_int * err = NULL)
4044  {
4045  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
4046  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
4047  cl_int* errcode_ret);
4048  static PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR = NULL;
4049 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4050  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
4051  cl_platform_id platform = nullptr;
4052  for( size_type i = 0; i < props.size(); ++i ) {
4053  if( props[i] == CL_CONTEXT_PLATFORM ) {
4054  platform = reinterpret_cast<cl_platform_id>(props[i+1]);
4055  }
4056  }
4057  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
4058 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
4059  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
4060 #endif
4061 
4062  cl_int error;
4063  object_ = pfn_clCreateFromD3D10BufferKHR(
4064  context(),
4065  flags,
4066  bufobj,
4067  &error);
4068 
4069  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4070  if (err != NULL) {
4071  *err = error;
4072  }
4073  }
4074 
4076  BufferD3D10() : Buffer() { }
4077 
4085  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
4086  Buffer(buffer, retainObject) { }
4087 
4092  BufferD3D10& operator = (const cl_mem& rhs)
4093  {
4094  Buffer::operator=(rhs);
4095  return *this;
4096  }
4097 
4101  BufferD3D10(const BufferD3D10& buf) :
4102  Buffer(buf) {}
4103 
4107  BufferD3D10& operator = (const BufferD3D10 &buf)
4108  {
4109  Buffer::operator=(buf);
4110  return *this;
4111  }
4112 
4116  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4117 
4121  BufferD3D10& operator = (BufferD3D10 &&buf)
4122  {
4123  Buffer::operator=(std::move(buf));
4124  return *this;
4125  }
4126 };
4127 #endif
4128 
4137 class BufferGL : public Buffer
4138 {
4139 public:
4145  BufferGL(
4146  const Context& context,
4147  cl_mem_flags flags,
4148  cl_GLuint bufobj,
4149  cl_int * err = NULL)
4150  {
4151  cl_int error;
4152  object_ = ::clCreateFromGLBuffer(
4153  context(),
4154  flags,
4155  bufobj,
4156  &error);
4157 
4158  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4159  if (err != NULL) {
4160  *err = error;
4161  }
4162  }
4163 
4165  BufferGL() : Buffer() { }
4166 
4174  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4175  Buffer(buffer, retainObject) { }
4176 
4181  BufferGL& operator = (const cl_mem& rhs)
4182  {
4183  Buffer::operator=(rhs);
4184  return *this;
4185  }
4186 
4190  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4191 
4195  BufferGL& operator = (const BufferGL &buf)
4196  {
4197  Buffer::operator=(buf);
4198  return *this;
4199  }
4200 
4204  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4205 
4209  BufferGL& operator = (BufferGL &&buf) CL_HPP_NOEXCEPT_
4210  {
4211  Buffer::operator=(std::move(buf));
4212  return *this;
4213  }
4214 
4216  cl_int getObjectInfo(
4217  cl_gl_object_type *type,
4218  cl_GLuint * gl_object_name)
4219  {
4220  return detail::errHandler(
4221  ::clGetGLObjectInfo(object_,type,gl_object_name),
4222  __GET_GL_OBJECT_INFO_ERR);
4223  }
4224 };
4225 
4234 class BufferRenderGL : public Buffer
4235 {
4236 public:
4242  BufferRenderGL(
4243  const Context& context,
4244  cl_mem_flags flags,
4245  cl_GLuint bufobj,
4246  cl_int * err = NULL)
4247  {
4248  cl_int error;
4249  object_ = ::clCreateFromGLRenderbuffer(
4250  context(),
4251  flags,
4252  bufobj,
4253  &error);
4254 
4255  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4256  if (err != NULL) {
4257  *err = error;
4258  }
4259  }
4260 
4263  BufferRenderGL() : Buffer() { }
4271  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4272  Buffer(buffer, retainObject) { }
4273 
4278  BufferRenderGL& operator = (const cl_mem& rhs)
4279  {
4280  Buffer::operator=(rhs);
4281  return *this;
4282  }
4283 
4287  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4288 
4292  BufferRenderGL& operator = (const BufferRenderGL& buf)
4293  {
4294  Buffer::operator=(buf);
4295  return *this;
4296  }
4297 
4301  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4302 
4306  BufferRenderGL& operator = (BufferRenderGL &&buf) CL_HPP_NOEXCEPT_
4307  {
4308  Buffer::operator=(std::move(buf));
4309  return *this;
4310  }
4311 
4313  cl_int getObjectInfo(
4314  cl_gl_object_type *type,
4315  cl_GLuint * gl_object_name)
4316  {
4317  return detail::errHandler(
4318  ::clGetGLObjectInfo(object_,type,gl_object_name),
4319  __GET_GL_OBJECT_INFO_ERR);
4320  }
4321 };
4322 
4329 class Image : public Memory
4330 {
4331 protected:
4333  Image() : Memory() { }
4334 
4342  explicit Image(const cl_mem& image, bool retainObject = false) :
4343  Memory(image, retainObject) { }
4344 
4349  Image& operator = (const cl_mem& rhs)
4350  {
4351  Memory::operator=(rhs);
4352  return *this;
4353  }
4354 
4358  Image(const Image& img) : Memory(img) {}
4359 
4363  Image& operator = (const Image &img)
4364  {
4365  Memory::operator=(img);
4366  return *this;
4367  }
4368 
4372  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4373 
4377  Image& operator = (Image &&img) CL_HPP_NOEXCEPT_
4378  {
4379  Memory::operator=(std::move(img));
4380  return *this;
4381  }
4382 
4383 
4384 public:
4386  template <typename T>
4387  cl_int getImageInfo(cl_image_info name, T* param) const
4388  {
4389  return detail::errHandler(
4390  detail::getInfo(&::clGetImageInfo, object_, name, param),
4391  __GET_IMAGE_INFO_ERR);
4392  }
4393 
4395  template <cl_image_info name> typename
4396  detail::param_traits<detail::cl_image_info, name>::param_type
4397  getImageInfo(cl_int* err = NULL) const
4398  {
4399  typename detail::param_traits<
4400  detail::cl_image_info, name>::param_type param;
4401  cl_int result = getImageInfo(name, &param);
4402  if (err != NULL) {
4403  *err = result;
4404  }
4405  return param;
4406  }
4407 };
4408 
4409 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4410 
4416 class Image1D : public Image
4417 {
4418 public:
4423  Image1D(
4424  const Context& context,
4425  cl_mem_flags flags,
4426  ImageFormat format,
4427  size_type width,
4428  void* host_ptr = NULL,
4429  cl_int* err = NULL)
4430  {
4431  cl_int error;
4432  cl_image_desc desc =
4433  {
4434  CL_MEM_OBJECT_IMAGE1D,
4435  width,
4436  0, 0, 0, 0, 0, 0, 0, 0
4437  };
4438  object_ = ::clCreateImage(
4439  context(),
4440  flags,
4441  &format,
4442  &desc,
4443  host_ptr,
4444  &error);
4445 
4446  detail::errHandler(error, __CREATE_IMAGE_ERR);
4447  if (err != NULL) {
4448  *err = error;
4449  }
4450  }
4451 
4453  Image1D() { }
4454 
4462  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4463  Image(image1D, retainObject) { }
4464 
4469  Image1D& operator = (const cl_mem& rhs)
4470  {
4471  Image::operator=(rhs);
4472  return *this;
4473  }
4474 
4478  Image1D(const Image1D& img) : Image(img) {}
4479 
4483  Image1D& operator = (const Image1D &img)
4484  {
4485  Image::operator=(img);
4486  return *this;
4487  }
4488 
4492  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4493 
4497  Image1D& operator = (Image1D &&img) CL_HPP_NOEXCEPT_
4498  {
4499  Image::operator=(std::move(img));
4500  return *this;
4501  }
4502 
4503 };
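#if defined(CL_HPP_ENABLE_DOC_EXAMPLES)
// Editor's illustrative sketch, not part of the original header; the guard
// macro and function name are hypothetical. Shows the Image1D constructor:
// a width-element, single-channel float image allocated in the given context
// (host_ptr is left NULL so the runtime allocates the storage).
inline Image1D exampleCreateImage1D(const Context &context, size_type width, cl_int *err = NULL)
{
    return Image1D(context, CL_MEM_READ_WRITE, ImageFormat(CL_R, CL_FLOAT), width, NULL, err);
}
#endif // CL_HPP_ENABLE_DOC_EXAMPLES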
4504 
4508 class Image1DBuffer : public Image
4509 {
4510 public:
4511  Image1DBuffer(
4512  const Context& context,
4513  cl_mem_flags flags,
4514  ImageFormat format,
4515  size_type width,
4516  const Buffer &buffer,
4517  cl_int* err = NULL)
4518  {
4519  cl_int error;
4520  cl_image_desc desc =
4521  {
4522  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4523  width,
4524  0, 0, 0, 0, 0, 0, 0,
4525  buffer()
4526  };
4527  object_ = ::clCreateImage(
4528  context(),
4529  flags,
4530  &format,
4531  &desc,
4532  NULL,
4533  &error);
4534 
4535  detail::errHandler(error, __CREATE_IMAGE_ERR);
4536  if (err != NULL) {
4537  *err = error;
4538  }
4539  }
4540 
4541  Image1DBuffer() { }
4542 
4550  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4551  Image(image1D, retainObject) { }
4552 
4553  Image1DBuffer& operator = (const cl_mem& rhs)
4554  {
4555  Image::operator=(rhs);
4556  return *this;
4557  }
4558 
4562  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4563 
4567  Image1DBuffer& operator = (const Image1DBuffer &img)
4568  {
4569  Image::operator=(img);
4570  return *this;
4571  }
4572 
4576  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4577 
4581  Image1DBuffer& operator = (Image1DBuffer &&img)
4582  {
4583  Image::operator=(std::move(img));
4584  return *this;
4585  }
4586 
4587 };
4588 
4592 class Image1DArray : public Image
4593 {
4594 public:
4595  Image1DArray(
4596  const Context& context,
4597  cl_mem_flags flags,
4598  ImageFormat format,
4599  size_type arraySize,
4600  size_type width,
4601  size_type rowPitch,
4602  void* host_ptr = NULL,
4603  cl_int* err = NULL)
4604  {
4605  cl_int error;
4606  cl_image_desc desc =
4607  {
4608  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4609  width,
4610  0, 0, // height, depth (unused)
4611  arraySize,
4612  rowPitch,
4613  0, 0, 0, 0
4614  };
4615  object_ = ::clCreateImage(
4616  context(),
4617  flags,
4618  &format,
4619  &desc,
4620  host_ptr,
4621  &error);
4622 
4623  detail::errHandler(error, __CREATE_IMAGE_ERR);
4624  if (err != NULL) {
4625  *err = error;
4626  }
4627  }
4628 
4629  Image1DArray() { }
4630 
4638  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4639  Image(imageArray, retainObject) { }
4640 
4641 
4642  Image1DArray& operator = (const cl_mem& rhs)
4643  {
4644  Image::operator=(rhs);
4645  return *this;
4646  }
4647 
4651  Image1DArray(const Image1DArray& img) : Image(img) {}
4652 
4656  Image1DArray& operator = (const Image1DArray &img)
4657  {
4658  Image::operator=(img);
4659  return *this;
4660  }
4661 
4665  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4666 
4670  Image1DArray& operator = (Image1DArray &&img)
4671  {
4672  Image::operator=(std::move(img));
4673  return *this;
4674  }
4675 
4676 };
4677 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4678 
4679 
4686 class Image2D : public Image
4687 {
4688 public:
4693  Image2D(
4694  const Context& context,
4695  cl_mem_flags flags,
4696  ImageFormat format,
4697  size_type width,
4698  size_type height,
4699  size_type row_pitch = 0,
4700  void* host_ptr = NULL,
4701  cl_int* err = NULL)
4702  {
4703  cl_int error;
4704  bool useCreateImage;
4705 
4706 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4707  // Run-time decision based on the actual platform
4708  {
4709  cl_uint version = detail::getContextPlatformVersion(context());
4710  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4711  }
4712 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4713  useCreateImage = true;
4714 #else
4715  useCreateImage = false;
4716 #endif
4717 
4718 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4719  if (useCreateImage)
4720  {
4721  cl_image_desc desc =
4722  {
4723  CL_MEM_OBJECT_IMAGE2D,
4724  width,
4725  height,
4726  0, 0, // depth, array size (unused)
4727  row_pitch,
4728  0, 0, 0, 0
4729  };
4730  object_ = ::clCreateImage(
4731  context(),
4732  flags,
4733  &format,
4734  &desc,
4735  host_ptr,
4736  &error);
4737 
4738  detail::errHandler(error, __CREATE_IMAGE_ERR);
4739  if (err != NULL) {
4740  *err = error;
4741  }
4742  }
4743 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4744 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4745  if (!useCreateImage)
4746  {
4747  object_ = ::clCreateImage2D(
4748  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4749 
4750  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4751  if (err != NULL) {
4752  *err = error;
4753  }
4754  }
4755 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4756  }
4757 
4758 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4759 
4764  Image2D(
4765  const Context& context,
4766  ImageFormat format,
4767  const Buffer &sourceBuffer,
4768  size_type width,
4769  size_type height,
4770  size_type row_pitch = 0,
4771  cl_int* err = nullptr)
4772  {
4773  cl_int error;
4774 
4775  cl_image_desc desc =
4776  {
4777  CL_MEM_OBJECT_IMAGE2D,
4778  width,
4779  height,
4780  0, 0, // depth, array size (unused)
4781  row_pitch,
4782  0, 0, 0,
4783  // Use buffer as input to image
4784  sourceBuffer()
4785  };
4786  object_ = ::clCreateImage(
4787  context(),
4788  0, // flags inherited from buffer
4789  &format,
4790  &desc,
4791  nullptr,
4792  &error);
4793 
4794  detail::errHandler(error, __CREATE_IMAGE_ERR);
4795  if (err != nullptr) {
4796  *err = error;
4797  }
4798  }
4799 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4800 
4801 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4802 
4814  Image2D(
4815  const Context& context,
4816  cl_channel_order order,
4817  const Image &sourceImage,
4818  cl_int* err = nullptr)
4819  {
4820  cl_int error;
4821 
4822  // Descriptor fields have to match source image
4823  size_type sourceWidth =
4824  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4825  size_type sourceHeight =
4826  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4827  size_type sourceRowPitch =
4828  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4829  cl_uint sourceNumMIPLevels =
4830  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4831  cl_uint sourceNumSamples =
4832  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4833  cl_image_format sourceFormat =
4834  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4835 
4836  // Update only the channel order.
4837  // Channel format inherited from source.
4838  sourceFormat.image_channel_order = order;
4839  cl_image_desc desc =
4840  {
4841  CL_MEM_OBJECT_IMAGE2D,
4842  sourceWidth,
4843  sourceHeight,
4844  0, 0, // depth (unused), array size (unused)
4845  sourceRowPitch,
4846  0, // slice pitch (unused)
4847  sourceNumMIPLevels,
4848  sourceNumSamples,
4849  // Use buffer as input to image
4850  sourceImage()
4851  };
4852  object_ = ::clCreateImage(
4853  context(),
4854  0, // flags should be inherited from mem_object
4855  &sourceFormat,
4856  &desc,
4857  nullptr,
4858  &error);
4859 
4860  detail::errHandler(error, __CREATE_IMAGE_ERR);
4861  if (err != nullptr) {
4862  *err = error;
4863  }
4864  }
4865 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4866 
4868  Image2D() { }
4869 
4877  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4878  Image(image2D, retainObject) { }
4879 
4884  Image2D& operator = (const cl_mem& rhs)
4885  {
4886  Image::operator=(rhs);
4887  return *this;
4888  }
4889 
4893  Image2D(const Image2D& img) : Image(img) {}
4894 
4898  Image2D& operator = (const Image2D &img)
4899  {
4900  Image::operator=(img);
4901  return *this;
4902  }
4903 
4907  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4908 
4912  Image2D& operator = (Image2D &&img)
4913  {
4914  Image::operator=(std::move(img));
4915  return *this;
4916  }
4917 
4918 };
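// Illustrative usage (comment only), a minimal sketch of creating a 2D image initialized
// from host memory; `context`, `width`, and `height` are assumed to exist in the caller.
//
//   std::vector<cl_uchar> pixels(width * height * 4);
//   cl::ImageFormat fmt(CL_RGBA, CL_UNORM_INT8);
//   cl::Image2D img(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
//                   fmt, width, height, 0 /*row_pitch*/, pixels.data());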
4919 
4920 
4921 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4922 
4931 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4932 {
4933 public:
4939  Image2DGL(
4940  const Context& context,
4941  cl_mem_flags flags,
4942  cl_GLenum target,
4943  cl_GLint miplevel,
4944  cl_GLuint texobj,
4945  cl_int * err = NULL)
4946  {
4947  cl_int error;
4948  object_ = ::clCreateFromGLTexture2D(
4949  context(),
4950  flags,
4951  target,
4952  miplevel,
4953  texobj,
4954  &error);
4955 
4956  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4957  if (err != NULL) {
4958  *err = error;
4959  }
4960 
4961  }
4962 
4964  Image2DGL() : Image2D() { }
4965 
4973  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4974  Image2D(image, retainObject) { }
4975 
4980  Image2DGL& operator = (const cl_mem& rhs)
4981  {
4982  Image2D::operator=(rhs);
4983  return *this;
4984  }
4985 
4989  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4990 
4994  Image2DGL& operator = (const Image2DGL &img)
4995  {
4996  Image2D::operator=(img);
4997  return *this;
4998  }
4999 
5003  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
5004 
5008  Image2DGL& operator = (Image2DGL &&img)
5009  {
5010  Image2D::operator=(std::move(img));
5011  return *this;
5012  }
5013 
5014 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
5015 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5016 
5017 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5018 
5021 class Image2DArray : public Image
5022 {
5023 public:
5024  Image2DArray(
5025  const Context& context,
5026  cl_mem_flags flags,
5027  ImageFormat format,
5028  size_type arraySize,
5029  size_type width,
5030  size_type height,
5031  size_type rowPitch,
5032  size_type slicePitch,
5033  void* host_ptr = NULL,
5034  cl_int* err = NULL)
5035  {
5036  cl_int error;
5037  cl_image_desc desc =
5038  {
5039  CL_MEM_OBJECT_IMAGE2D_ARRAY,
5040  width,
5041  height,
5042  0, // depth (unused)
5043  arraySize,
5044  rowPitch,
5045  slicePitch,
5046  0, 0, 0
5047  };
5048  object_ = ::clCreateImage(
5049  context(),
5050  flags,
5051  &format,
5052  &desc,
5053  host_ptr,
5054  &error);
5055 
5056  detail::errHandler(error, __CREATE_IMAGE_ERR);
5057  if (err != NULL) {
5058  *err = error;
5059  }
5060  }
5061 
5062  Image2DArray() { }
5063 
5071  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
5072 
5073  Image2DArray& operator = (const cl_mem& rhs)
5074  {
5075  Image::operator=(rhs);
5076  return *this;
5077  }
5078 
5082  Image2DArray(const Image2DArray& img) : Image(img) {}
5083 
5087  Image2DArray& operator = (const Image2DArray &img)
5088  {
5089  Image::operator=(img);
5090  return *this;
5091  }
5092 
5096  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5097 
5101  Image2DArray& operator = (Image2DArray &&img)
5102  {
5103  Image::operator=(std::move(img));
5104  return *this;
5105  }
5106 };
5107 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
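// Illustrative usage (comment only), a sketch of creating a six-layer 2D image array;
// `context` is assumed to be an existing cl::Context.
//
//   cl::ImageFormat fmt(CL_RGBA, CL_UNORM_INT8);
//   cl::Image2DArray layers(context, CL_MEM_READ_WRITE, fmt,
//                           6 /*arraySize*/, 256 /*width*/, 256 /*height*/,
//                           0 /*rowPitch*/, 0 /*slicePitch*/);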
5108 
5115 class Image3D : public Image
5116 {
5117 public:
5122  Image3D(
5123  const Context& context,
5124  cl_mem_flags flags,
5125  ImageFormat format,
5126  size_type width,
5127  size_type height,
5128  size_type depth,
5129  size_type row_pitch = 0,
5130  size_type slice_pitch = 0,
5131  void* host_ptr = NULL,
5132  cl_int* err = NULL)
5133  {
5134  cl_int error;
5135  bool useCreateImage;
5136 
5137 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
5138  // Run-time decision based on the actual platform
5139  {
5140  cl_uint version = detail::getContextPlatformVersion(context());
5141  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
5142  }
5143 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
5144  useCreateImage = true;
5145 #else
5146  useCreateImage = false;
5147 #endif
5148 
5149 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5150  if (useCreateImage)
5151  {
5152  cl_image_desc desc =
5153  {
5154  CL_MEM_OBJECT_IMAGE3D,
5155  width,
5156  height,
5157  depth,
5158  0, // array size (unused)
5159  row_pitch,
5160  slice_pitch,
5161  0, 0, 0
5162  };
5163  object_ = ::clCreateImage(
5164  context(),
5165  flags,
5166  &format,
5167  &desc,
5168  host_ptr,
5169  &error);
5170 
5171  detail::errHandler(error, __CREATE_IMAGE_ERR);
5172  if (err != NULL) {
5173  *err = error;
5174  }
5175  }
5176 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5177 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5178  if (!useCreateImage)
5179  {
5180  object_ = ::clCreateImage3D(
5181  context(), flags, &format, width, height, depth, row_pitch,
5182  slice_pitch, host_ptr, &error);
5183 
5184  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5185  if (err != NULL) {
5186  *err = error;
5187  }
5188  }
5189 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5190  }
5191 
5193  Image3D() : Image() { }
5194 
5202  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5203  Image(image3D, retainObject) { }
5204 
5209  Image3D& operator = (const cl_mem& rhs)
5210  {
5211  Image::operator=(rhs);
5212  return *this;
5213  }
5214 
5218  Image3D(const Image3D& img) : Image(img) {}
5219 
5223  Image3D& operator = (const Image3D &img)
5224  {
5225  Image::operator=(img);
5226  return *this;
5227  }
5228 
5232  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5233 
5237  Image3D& operator = (Image3D &&img)
5238  {
5239  Image::operator=(std::move(img));
5240  return *this;
5241  }
5242 };
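// Illustrative usage (comment only), a sketch of creating a 3D single-channel float
// volume; `context` is assumed to be an existing cl::Context.
//
//   cl::ImageFormat fmt(CL_R, CL_FLOAT);
//   cl::Image3D volume(context, CL_MEM_READ_ONLY, fmt,
//                      128 /*width*/, 128 /*height*/, 128 /*depth*/);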
5243 
5244 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5245 
5253 class Image3DGL : public Image3D
5254 {
5255 public:
5261  Image3DGL(
5262  const Context& context,
5263  cl_mem_flags flags,
5264  cl_GLenum target,
5265  cl_GLint miplevel,
5266  cl_GLuint texobj,
5267  cl_int * err = NULL)
5268  {
5269  cl_int error;
5270  object_ = ::clCreateFromGLTexture3D(
5271  context(),
5272  flags,
5273  target,
5274  miplevel,
5275  texobj,
5276  &error);
5277 
5278  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5279  if (err != NULL) {
5280  *err = error;
5281  }
5282  }
5283 
5285  Image3DGL() : Image3D() { }
5286 
5294  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5295  Image3D(image, retainObject) { }
5296 
5301  Image3DGL& operator = (const cl_mem& rhs)
5302  {
5303  Image3D::operator=(rhs);
5304  return *this;
5305  }
5306 
5310  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5311 
5315  Image3DGL& operator = (const Image3DGL &img)
5316  {
5317  Image3D::operator=(img);
5318  return *this;
5319  }
5320 
5324  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5325 
5329  Image3DGL& operator = (Image3DGL &&img)
5330  {
5331  Image3D::operator=(std::move(img));
5332  return *this;
5333  }
5334 };
5335 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5336 
5337 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5338 
5344 class ImageGL : public Image
5345 {
5346 public:
5347  ImageGL(
5348  const Context& context,
5349  cl_mem_flags flags,
5350  cl_GLenum target,
5351  cl_GLint miplevel,
5352  cl_GLuint texobj,
5353  cl_int * err = NULL)
5354  {
5355  cl_int error;
5356  object_ = ::clCreateFromGLTexture(
5357  context(),
5358  flags,
5359  target,
5360  miplevel,
5361  texobj,
5362  &error);
5363 
5364  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5365  if (err != NULL) {
5366  *err = error;
5367  }
5368  }
5369 
5370  ImageGL() : Image() { }
5371 
5379  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5380  Image(image, retainObject) { }
5381 
5382  ImageGL& operator = (const cl_mem& rhs)
5383  {
5384  Image::operator=(rhs);
5385  return *this;
5386  }
5387 
5391  ImageGL(const ImageGL& img) : Image(img) {}
5392 
5396  ImageGL& operator = (const ImageGL &img)
5397  {
5398  Image::operator=(img);
5399  return *this;
5400  }
5401 
5405  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5406 
5410  ImageGL& operator = (ImageGL &&img)
5411  {
5412  Image::operator=(std::move(img));
5413  return *this;
5414  }
5415 };
5416 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5417 
5418 
5419 
5420 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5421 
5427 class Pipe : public Memory
5428 {
5429 public:
5430 
5440  Pipe(
5441  const Context& context,
5442  cl_uint packet_size,
5443  cl_uint max_packets,
5444  cl_int* err = NULL)
5445  {
5446  cl_int error;
5447 
5448  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5449  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5450 
5451  detail::errHandler(error, __CREATE_PIPE_ERR);
5452  if (err != NULL) {
5453  *err = error;
5454  }
5455  }
5456 
5465  Pipe(
5466  cl_uint packet_size,
5467  cl_uint max_packets,
5468  cl_int* err = NULL)
5469  {
5470  cl_int error;
5471 
5472  Context context = Context::getDefault(err);
5473 
5474  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5475  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5476 
5477  detail::errHandler(error, __CREATE_PIPE_ERR);
5478  if (err != NULL) {
5479  *err = error;
5480  }
5481  }
5482 
5484  Pipe() : Memory() { }
5485 
5493  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5494  Memory(pipe, retainObject) { }
5495 
5500  Pipe& operator = (const cl_mem& rhs)
5501  {
5502  Memory::operator=(rhs);
5503  return *this;
5504  }
5505 
5509  Pipe(const Pipe& pipe) : Memory(pipe) {}
5510 
5514  Pipe& operator = (const Pipe &pipe)
5515  {
5516  Memory::operator=(pipe);
5517  return *this;
5518  }
5519 
5523  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5524 
5528  Pipe& operator = (Pipe &&pipe)
5529  {
5530  Memory::operator=(std::move(pipe));
5531  return *this;
5532  }
5533 
5535  template <typename T>
5536  cl_int getInfo(cl_pipe_info name, T* param) const
5537  {
5538  return detail::errHandler(
5539  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5540  __GET_PIPE_INFO_ERR);
5541  }
5542 
5544  template <cl_pipe_info name> typename
5545  detail::param_traits<detail::cl_pipe_info, name>::param_type
5546  getInfo(cl_int* err = NULL) const
5547  {
5548  typename detail::param_traits<
5549  detail::cl_pipe_info, name>::param_type param;
5550  cl_int result = getInfo(name, &param);
5551  if (err != NULL) {
5552  *err = result;
5553  }
5554  return param;
5555  }
5556 }; // class Pipe
5557 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
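// Illustrative usage (comment only), a sketch of creating a pipe and querying it;
// `context` is assumed to be an existing cl::Context on an OpenCL 2.0 device.
//
//   cl::Pipe pipe(context, sizeof(cl_int) /*packet_size*/, 1024 /*max_packets*/);
//   cl_uint maxPackets = pipe.getInfo<CL_PIPE_MAX_PACKETS>();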
5558 
5559 
5568 class Sampler : public detail::Wrapper<cl_sampler>
5569 {
5570 public:
5572  Sampler() { }
5573 
5578  Sampler(
5579  const Context& context,
5580  cl_bool normalized_coords,
5581  cl_addressing_mode addressing_mode,
5582  cl_filter_mode filter_mode,
5583  cl_int* err = NULL)
5584  {
5585  cl_int error;
5586 
5587 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5588  cl_sampler_properties sampler_properties[] = {
5589  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5590  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5591  CL_SAMPLER_FILTER_MODE, filter_mode,
5592  0 };
5593  object_ = ::clCreateSamplerWithProperties(
5594  context(),
5595  sampler_properties,
5596  &error);
5597 
5598  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5599  if (err != NULL) {
5600  *err = error;
5601  }
5602 #else
5603  object_ = ::clCreateSampler(
5604  context(),
5605  normalized_coords,
5606  addressing_mode,
5607  filter_mode,
5608  &error);
5609 
5610  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5611  if (err != NULL) {
5612  *err = error;
5613  }
5614 #endif
5615  }
5616 
5625  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5626  detail::Wrapper<cl_type>(sampler, retainObject) { }
5627 
5633  Sampler& operator = (const cl_sampler& rhs)
5634  {
5635  detail::Wrapper<cl_type>::operator=(rhs);
5636  return *this;
5637  }
5638 
5642  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5643 
5647  Sampler& operator = (const Sampler &sam)
5648  {
5649  detail::Wrapper<cl_type>::operator=(sam);
5650  return *this;
5651  }
5652 
5656  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5657 
5661  Sampler& operator = (Sampler &&sam)
5662  {
5663  detail::Wrapper<cl_type>::operator=(std::move(sam));
5664  return *this;
5665  }
5666 
5668  template <typename T>
5669  cl_int getInfo(cl_sampler_info name, T* param) const
5670  {
5671  return detail::errHandler(
5672  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5673  __GET_SAMPLER_INFO_ERR);
5674  }
5675 
5677  template <cl_sampler_info name> typename
5678  detail::param_traits<detail::cl_sampler_info, name>::param_type
5679  getInfo(cl_int* err = NULL) const
5680  {
5681  typename detail::param_traits<
5682  detail::cl_sampler_info, name>::param_type param;
5683  cl_int result = getInfo(name, &param);
5684  if (err != NULL) {
5685  *err = result;
5686  }
5687  return param;
5688  }
5689 };
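// Illustrative usage (comment only), a sketch of creating a sampler and reading a
// property back; `context` is assumed to be an existing cl::Context.
//
//   cl::Sampler sampler(context, CL_FALSE /*normalized coords*/,
//                       CL_ADDRESS_CLAMP_TO_EDGE, CL_FILTER_NEAREST);
//   cl_filter_mode mode = sampler.getInfo<CL_SAMPLER_FILTER_MODE>();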
5690 
5691 class Program;
5692 class CommandQueue;
5693 class DeviceCommandQueue;
5694 class Kernel;
5695 
5697 class NDRange
5698 {
5699 private:
5700  size_type sizes_[3];
5701  cl_uint dimensions_;
5702 
5703 public:
5705  NDRange()
5706  : dimensions_(0)
5707  {
5708  sizes_[0] = 0;
5709  sizes_[1] = 0;
5710  sizes_[2] = 0;
5711  }
5712 
5714  NDRange(size_type size0)
5715  : dimensions_(1)
5716  {
5717  sizes_[0] = size0;
5718  sizes_[1] = 1;
5719  sizes_[2] = 1;
5720  }
5721 
5723  NDRange(size_type size0, size_type size1)
5724  : dimensions_(2)
5725  {
5726  sizes_[0] = size0;
5727  sizes_[1] = size1;
5728  sizes_[2] = 1;
5729  }
5730 
5732  NDRange(size_type size0, size_type size1, size_type size2)
5733  : dimensions_(3)
5734  {
5735  sizes_[0] = size0;
5736  sizes_[1] = size1;
5737  sizes_[2] = size2;
5738  }
5739 
5744  operator const size_type*() const {
5745  return sizes_;
5746  }
5747 
5749  size_type dimensions() const
5750  {
5751  return dimensions_;
5752  }
5753 
5755  // Size of the range in bytes: runtime number of dimensions * sizeof(size_type)
5756  size_type size() const
5757  {
5758  return dimensions_*sizeof(size_type);
5759  }
5760 
5761  size_type* get()
5762  {
5763  return sizes_;
5764  }
5765 
5766  const size_type* get() const
5767  {
5768  return sizes_;
5769  }
5770 };
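// Illustrative usage (comment only), a sketch of describing a 2D launch with NDRange;
// `queue` and `kernel` are assumed to be an existing cl::CommandQueue and cl::Kernel.
//
//   cl::NDRange global(1024, 768);   // total work-items per dimension
//   cl::NDRange local(16, 16);       // work-group size
//   queue.enqueueNDRangeKernel(kernel, cl::NullRange, global, local);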
5771 
5773 static const NDRange NullRange;
5774 
5776 struct LocalSpaceArg
5777 {
5778  size_type size_;
5779 };
5780 
5781 namespace detail {
5782 
5783 template <typename T, class Enable = void>
5784 struct KernelArgumentHandler;
5785 
5786 // Enable for objects that are not subclasses of memory
5787 // Pointers, constants etc
5788 template <typename T>
5789 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5790 {
5791  static size_type size(const T&) { return sizeof(T); }
5792  static const T* ptr(const T& value) { return &value; }
5793 };
5794 
5795 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5796 // and pass that in for safety
5797 template <typename T>
5798 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5799 {
5800  static size_type size(const T&) { return sizeof(cl_mem); }
5801  static const cl_mem* ptr(const T& value) { return &(value()); }
5802 };
5803 
5804 // Specialization for DeviceCommandQueue defined later
5805 
5806 template <>
5807 struct KernelArgumentHandler<LocalSpaceArg, void>
5808 {
5809  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5810  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5811 };
5812 
5813 }
5815 
5819 inline LocalSpaceArg
5820 Local(size_type size)
5821 {
5822  LocalSpaceArg ret = { size };
5823  return ret;
5824 }
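// Illustrative usage (comment only), a sketch of passing dynamically sized local memory
// to a kernel via cl::Local; `kernel`, `buffer`, and `workGroupSize` are assumed to exist.
//
//   kernel.setArg(0, buffer);                                       // cl::Buffer argument
//   kernel.setArg(1, cl::Local(workGroupSize * sizeof(cl_float)));  // __local scratch space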
5825 
5834 class Kernel : public detail::Wrapper<cl_kernel>
5835 {
5836 public:
5837  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5838 
5840  Kernel() { }
5841 
5850  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5851  detail::Wrapper<cl_type>(kernel, retainObject) { }
5852 
5858  Kernel& operator = (const cl_kernel& rhs)
5859  {
5860  detail::Wrapper<cl_type>::operator=(rhs);
5861  return *this;
5862  }
5863 
5867  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5868 
5872  Kernel& operator = (const Kernel &kernel)
5873  {
5874  detail::Wrapper<cl_type>::operator=(kernel);
5875  return *this;
5876  }
5877 
5881  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5882 
5886  Kernel& operator = (Kernel &&kernel)
5887  {
5888  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5889  return *this;
5890  }
5891 
5892  template <typename T>
5893  cl_int getInfo(cl_kernel_info name, T* param) const
5894  {
5895  return detail::errHandler(
5896  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5897  __GET_KERNEL_INFO_ERR);
5898  }
5899 
5900  template <cl_kernel_info name> typename
5901  detail::param_traits<detail::cl_kernel_info, name>::param_type
5902  getInfo(cl_int* err = NULL) const
5903  {
5904  typename detail::param_traits<
5905  detail::cl_kernel_info, name>::param_type param;
5906  cl_int result = getInfo(name, &param);
5907  if (err != NULL) {
5908  *err = result;
5909  }
5910  return param;
5911  }
5912 
5913 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5914  template <typename T>
5915  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5916  {
5917  return detail::errHandler(
5918  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5919  __GET_KERNEL_ARG_INFO_ERR);
5920  }
5921 
5922  template <cl_kernel_arg_info name> typename
5923  detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
5924  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5925  {
5926  typename detail::param_traits<
5927  detail::cl_kernel_arg_info, name>::param_type param;
5928  cl_int result = getArgInfo(argIndex, name, &param);
5929  if (err != NULL) {
5930  *err = result;
5931  }
5932  return param;
5933  }
5934 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5935 
5936  template <typename T>
5937  cl_int getWorkGroupInfo(
5938  const Device& device, cl_kernel_work_group_info name, T* param) const
5939  {
5940  return detail::errHandler(
5941  detail::getInfo(
5942  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5943  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5944  }
5945 
5946  template <cl_kernel_work_group_info name> typename
5947  detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
5948  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5949  {
5950  typename detail::param_traits<
5951  detail::cl_kernel_work_group_info, name>::param_type param;
5952  cl_int result = getWorkGroupInfo(device, name, &param);
5953  if (err != NULL) {
5954  *err = result;
5955  }
5956  return param;
5957  }
5958 
5959 #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
5960  cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
5961  {
5962 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5963 
5964  return detail::errHandler(
5965  clGetKernelSubGroupInfo(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5966  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5967 
5968 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5969 
5970  typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
5971  static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
5972  CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
5973 
5974  return detail::errHandler(
5975  pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5976  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5977 
5978 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5979  }
5980 
5981  template <cl_kernel_sub_group_info name>
5982  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5983  {
5984  size_type param;
5985  cl_int result = getSubGroupInfo(dev, name, range, &param);
5986  if (err != NULL) {
5987  *err = result;
5988  }
5989  return param;
5990  }
5991 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5992 
5993 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5994 
5996  template<typename T, class D>
5997  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5998  {
5999  return detail::errHandler(
6000  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
6001  __SET_KERNEL_ARGS_ERR);
6002  }
6003 
6006  template<typename T, class Alloc>
6007  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
6008  {
6009  return detail::errHandler(
6010  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
6011  __SET_KERNEL_ARGS_ERR);
6012  }
6013 
6016  template<typename T>
6017  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
6018  setArg(cl_uint index, const T argPtr)
6019  {
6020  return detail::errHandler(
6021  ::clSetKernelArgSVMPointer(object_, index, argPtr),
6022  __SET_KERNEL_ARGS_ERR);
6023  }
6024 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6025 
6028  template <typename T>
6029  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
6030  setArg(cl_uint index, const T &value)
6031  {
6032  return detail::errHandler(
6033  ::clSetKernelArg(
6034  object_,
6035  index,
6036  detail::KernelArgumentHandler<T>::size(value),
6037  detail::KernelArgumentHandler<T>::ptr(value)),
6038  __SET_KERNEL_ARGS_ERR);
6039  }
6040 
6041  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
6042  {
6043  return detail::errHandler(
6044  ::clSetKernelArg(object_, index, size, argPtr),
6045  __SET_KERNEL_ARGS_ERR);
6046  }
6047 
6048 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6049 
6053  cl_int setSVMPointers(const vector<void*> &pointerList)
6054  {
6055  return detail::errHandler(
6056  ::clSetKernelExecInfo(
6057  object_,
6058  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6059  sizeof(void*)*pointerList.size(),
6060  pointerList.data()));
6061  }
6062 
6067  template<int ArrayLength>
6068  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
6069  {
6070  return detail::errHandler(
6071  ::clSetKernelExecInfo(
6072  object_,
6073  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6074  sizeof(void*)*pointerList.size(),
6075  pointerList.data()));
6076  }
6077 
6089  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
6090  {
6091  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
6092  return detail::errHandler(
6093  ::clSetKernelExecInfo(
6094  object_,
6095  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
6096  sizeof(cl_bool),
6097  &svmEnabled_
6098  )
6099  );
6100  }
6101 
6102  template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
6103  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
6104  {
6105  pointerList[index] = static_cast<void*>(t0.get());
6106  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6107  }
6108 
6109  template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
6110  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6111  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
6112  {
6113  pointerList[index] = static_cast<void*>(t0);
6114  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6115  }
6116 
6117  template<int index, int ArrayLength, typename T0, class D>
6118  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
6119  {
6120  pointerList[index] = static_cast<void*>(t0.get());
6121  }
6122 
6123 
6124  template<int index, int ArrayLength, typename T0>
6125  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6126  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
6127  {
6128  pointerList[index] = static_cast<void*>(t0);
6129  }
6130 
6131  template<typename T0, typename... Ts>
6132  cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
6133  {
6134  std::array<void*, 1 + sizeof...(Ts)> pointerList;
6135 
6136  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
6137  return detail::errHandler(
6138  ::clSetKernelExecInfo(
6139  object_,
6140  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6141  sizeof(void*)*(1 + sizeof...(Ts)),
6142  pointerList.data()));
6143  }
6144 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6145 
6146 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6147 
6152  Kernel clone()
6153  {
6154  cl_int error;
6155  Kernel retValue(clCloneKernel(this->get(), &error));
6156 
6157  detail::errHandler(error, __CLONE_KERNEL_ERR);
6158  return retValue;
6159  }
6160 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6161 };
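// Illustrative usage (comment only), a sketch of looking up a kernel and setting its
// arguments; the kernel name and the buffer/count names are placeholders.
//
//   cl::Kernel k(program, "vector_add");
//   k.setArg(0, srcA);                         // cl::Buffer: passes the underlying cl_mem
//   k.setArg(1, srcB);
//   k.setArg(2, dst);
//   k.setArg(3, static_cast<cl_uint>(count));  // POD value: passed by size and pointer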
6162 
6166 class Program : public detail::Wrapper<cl_program>
6167 {
6168 public:
6169 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6170  typedef vector<vector<unsigned char>> Binaries;
6171  typedef vector<string> Sources;
6172 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6173  typedef vector<std::pair<const void*, size_type> > Binaries;
6174  typedef vector<std::pair<const char*, size_type> > Sources;
6175 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6176 
6177  Program(
6178  const string& source,
6179  bool build = false,
6180  cl_int* err = NULL)
6181  {
6182  cl_int error;
6183 
6184  const char * strings = source.c_str();
6185  const size_type length = source.size();
6186 
6187  Context context = Context::getDefault(err);
6188 
6189  object_ = ::clCreateProgramWithSource(
6190  context(), (cl_uint)1, &strings, &length, &error);
6191 
6192  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6193 
6194  if (error == CL_SUCCESS && build) {
6195 
6196  error = ::clBuildProgram(
6197  object_,
6198  0,
6199  NULL,
6200 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6201  "-cl-std=CL2.0",
6202 #else
6203  "",
6204 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6205  NULL,
6206  NULL);
6207 
6208  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6209  }
6210 
6211  if (err != NULL) {
6212  *err = error;
6213  }
6214  }
6215 
6216  Program(
6217  const Context& context,
6218  const string& source,
6219  bool build = false,
6220  cl_int* err = NULL)
6221  {
6222  cl_int error;
6223 
6224  const char * strings = source.c_str();
6225  const size_type length = source.size();
6226 
6227  object_ = ::clCreateProgramWithSource(
6228  context(), (cl_uint)1, &strings, &length, &error);
6229 
6230  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6231 
6232  if (error == CL_SUCCESS && build) {
6233  error = ::clBuildProgram(
6234  object_,
6235  0,
6236  NULL,
6237 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6238  "-cl-std=CL2.0",
6239 #else
6240  "",
6241 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6242  NULL,
6243  NULL);
6244 
6245  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6246  }
6247 
6248  if (err != NULL) {
6249  *err = error;
6250  }
6251  }
6252 
6257  Program(
6258  const Sources& sources,
6259  cl_int* err = NULL)
6260  {
6261  cl_int error;
6262  Context context = Context::getDefault(err);
6263 
6264  const size_type n = (size_type)sources.size();
6265 
6266  vector<size_type> lengths(n);
6267  vector<const char*> strings(n);
6268 
6269  for (size_type i = 0; i < n; ++i) {
6270 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6271  strings[i] = sources[(int)i].data();
6272  lengths[i] = sources[(int)i].length();
6273 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6274  strings[i] = sources[(int)i].first;
6275  lengths[i] = sources[(int)i].second;
6276 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6277  }
6278 
6279  object_ = ::clCreateProgramWithSource(
6280  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6281 
6282  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6283  if (err != NULL) {
6284  *err = error;
6285  }
6286  }
6287 
6292  Program(
6293  const Context& context,
6294  const Sources& sources,
6295  cl_int* err = NULL)
6296  {
6297  cl_int error;
6298 
6299  const size_type n = (size_type)sources.size();
6300 
6301  vector<size_type> lengths(n);
6302  vector<const char*> strings(n);
6303 
6304  for (size_type i = 0; i < n; ++i) {
6305 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6306  strings[i] = sources[(int)i].data();
6307  lengths[i] = sources[(int)i].length();
6308 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6309  strings[i] = sources[(int)i].first;
6310  lengths[i] = sources[(int)i].second;
6311 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6312  }
6313 
6314  object_ = ::clCreateProgramWithSource(
6315  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6316 
6317  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6318  if (err != NULL) {
6319  *err = error;
6320  }
6321  }
6322 
6323 
6324 #if CL_HPP_TARGET_OPENCL_VERSION >= 210 || (CL_HPP_TARGET_OPENCL_VERSION==200 && defined(CL_HPP_USE_IL_KHR))
6325 
6329  Program(
6330  const vector<char>& IL,
6331  bool build = false,
6332  cl_int* err = NULL)
6333  {
6334  cl_int error;
6335 
6336  Context context = Context::getDefault(err);
6337 
6338 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6339 
6340  object_ = ::clCreateProgramWithIL(
6341  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6342 
6343 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6344 
6345  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6346  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6347  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6348 
6349  // Store the created program; the error code is checked below, as in the 2.1 path.
6350  object_ = pfn_clCreateProgramWithILKHR(
6351  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6352 
6353 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6354 
6355  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6356 
6357  if (error == CL_SUCCESS && build) {
6358 
6359  error = ::clBuildProgram(
6360  object_,
6361  0,
6362  NULL,
6363 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6364  "-cl-std=CL2.0",
6365 #else
6366  "",
6367 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6368  NULL,
6369  NULL);
6370 
6371  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6372  }
6373 
6374  if (err != NULL) {
6375  *err = error;
6376  }
6377  }
6378 
6384  Program(
6385  const Context& context,
6386  const vector<char>& IL,
6387  bool build = false,
6388  cl_int* err = NULL)
6389  {
6390  cl_int error;
6391 
6392 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6393 
6394  object_ = ::clCreateProgramWithIL(
6395  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6396 
6397 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6398 
6399  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6400  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6401  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6402 
6403  // Store the created program; the error code is checked below, as in the 2.1 path.
6404  object_ = pfn_clCreateProgramWithILKHR(
6405  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6406 
6407 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6408 
6409  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6410 
6411  if (error == CL_SUCCESS && build) {
6412  error = ::clBuildProgram(
6413  object_,
6414  0,
6415  NULL,
6416 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6417  "-cl-std=CL2.0",
6418 #else
6419  "",
6420 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6421  NULL,
6422  NULL);
6423 
6424  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6425  }
6426 
6427  if (err != NULL) {
6428  *err = error;
6429  }
6430  }
6431 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6432 
6452  Program(
6453  const Context& context,
6454  const vector<Device>& devices,
6455  const Binaries& binaries,
6456  vector<cl_int>* binaryStatus = NULL,
6457  cl_int* err = NULL)
6458  {
6459  cl_int error;
6460 
6461  const size_type numDevices = devices.size();
6462 
6463  // Catch size mismatch early and return
6464  if(binaries.size() != numDevices) {
6465  error = CL_INVALID_VALUE;
6466  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6467  if (err != NULL) {
6468  *err = error;
6469  }
6470  return;
6471  }
6472 
6473 
6474  vector<size_type> lengths(numDevices);
6475  vector<const unsigned char*> images(numDevices);
6476 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6477  for (size_type i = 0; i < numDevices; ++i) {
6478  images[i] = binaries[i].data();
6479  lengths[i] = binaries[(int)i].size();
6480  }
6481 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6482  for (size_type i = 0; i < numDevices; ++i) {
6483  images[i] = (const unsigned char*)binaries[i].first;
6484  lengths[i] = binaries[(int)i].second;
6485  }
6486 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6487 
6488  vector<cl_device_id> deviceIDs(numDevices);
6489  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6490  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6491  }
6492 
6493  if(binaryStatus) {
6494  binaryStatus->resize(numDevices);
6495  }
6496 
6497  object_ = ::clCreateProgramWithBinary(
6498  context(), (cl_uint) devices.size(),
6499  deviceIDs.data(),
6500  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6501  ? &binaryStatus->front()
6502  : NULL, &error);
6503 
6504  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6505  if (err != NULL) {
6506  *err = error;
6507  }
6508  }
6509 
6510 
6511 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6512 
6516  Program(
6517  const Context& context,
6518  const vector<Device>& devices,
6519  const string& kernelNames,
6520  cl_int* err = NULL)
6521  {
6522  cl_int error;
6523 
6524 
6525  size_type numDevices = devices.size();
6526  vector<cl_device_id> deviceIDs(numDevices);
6527  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6528  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6529  }
6530 
6531  object_ = ::clCreateProgramWithBuiltInKernels(
6532  context(),
6533  (cl_uint) devices.size(),
6534  deviceIDs.data(),
6535  kernelNames.c_str(),
6536  &error);
6537 
6538  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6539  if (err != NULL) {
6540  *err = error;
6541  }
6542  }
6543 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6544 
6545  Program() { }
6546 
6547 
6554  explicit Program(const cl_program& program, bool retainObject = false) :
6555  detail::Wrapper<cl_type>(program, retainObject) { }
6556 
6557  Program& operator = (const cl_program& rhs)
6558  {
6559  detail::Wrapper<cl_type>::operator=(rhs);
6560  return *this;
6561  }
6562 
6566  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6567 
6571  Program& operator = (const Program &program)
6572  {
6573  detail::Wrapper<cl_type>::operator=(program);
6574  return *this;
6575  }
6576 
6580  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6581 
6585  Program& operator = (Program &&program)
6586  {
6587  detail::Wrapper<cl_type>::operator=(std::move(program));
6588  return *this;
6589  }
6590 
6591  cl_int build(
6592  const vector<Device>& devices,
6593  const char* options = NULL,
6594  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6595  void* data = NULL) const
6596  {
6597  size_type numDevices = devices.size();
6598  vector<cl_device_id> deviceIDs(numDevices);
6599 
6600  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6601  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6602  }
6603 
6604  cl_int buildError = ::clBuildProgram(
6605  object_,
6606  (cl_uint)
6607  devices.size(),
6608  deviceIDs.data(),
6609  options,
6610  notifyFptr,
6611  data);
6612 
6613  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6614  }
6615 
6616  cl_int build(
6617  const char* options = NULL,
6618  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6619  void* data = NULL) const
6620  {
6621  cl_int buildError = ::clBuildProgram(
6622  object_,
6623  0,
6624  NULL,
6625  options,
6626  notifyFptr,
6627  data);
6628 
6629 
6630  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6631  }
6632 
6633 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6634  cl_int compile(
6635  const char* options = NULL,
6636  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6637  void* data = NULL) const
6638  {
6639  cl_int error = ::clCompileProgram(
6640  object_,
6641  0,
6642  NULL,
6643  options,
6644  0,
6645  NULL,
6646  NULL,
6647  notifyFptr,
6648  data);
6649  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6650  }
6651 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6652 
6653  template <typename T>
6654  cl_int getInfo(cl_program_info name, T* param) const
6655  {
6656  return detail::errHandler(
6657  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6658  __GET_PROGRAM_INFO_ERR);
6659  }
6660 
6661  template <cl_program_info name> typename
6662  detail::param_traits<detail::cl_program_info, name>::param_type
6663  getInfo(cl_int* err = NULL) const
6664  {
6665  typename detail::param_traits<
6666  detail::cl_program_info, name>::param_type param;
6667  cl_int result = getInfo(name, &param);
6668  if (err != NULL) {
6669  *err = result;
6670  }
6671  return param;
6672  }
6673 
6674  template <typename T>
6675  cl_int getBuildInfo(
6676  const Device& device, cl_program_build_info name, T* param) const
6677  {
6678  return detail::errHandler(
6679  detail::getInfo(
6680  &::clGetProgramBuildInfo, object_, device(), name, param),
6681  __GET_PROGRAM_BUILD_INFO_ERR);
6682  }
6683 
6684  template <cl_program_build_info name> typename
6685  detail::param_traits<detail::cl_program_build_info, name>::param_type
6686  getBuildInfo(const Device& device, cl_int* err = NULL) const
6687  {
6688  typename detail::param_traits<
6689  detail::cl_program_build_info, name>::param_type param;
6690  cl_int result = getBuildInfo(device, name, &param);
6691  if (err != NULL) {
6692  *err = result;
6693  }
6694  return param;
6695  }
6696 
6702  template <cl_program_build_info name>
6703  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6704  getBuildInfo(cl_int *err = NULL) const
6705  {
6706  cl_int result = CL_SUCCESS;
6707 
6708  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6709  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6710  devInfo;
6711 
6712  // If there was an initial error from getInfo return the error
6713  if (result != CL_SUCCESS) {
6714  if (err != NULL) {
6715  *err = result;
6716  }
6717  return devInfo;
6718  }
6719 
6720  for (const cl::Device &d : devs) {
6721  typename detail::param_traits<
6722  detail::cl_program_build_info, name>::param_type param;
6723  result = getBuildInfo(d, name, &param);
6724  devInfo.push_back(
6725  std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>
6726  (d, param));
6727  if (result != CL_SUCCESS) {
6728  // On error, leave the loop and return the error code
6729  break;
6730  }
6731  }
6732  if (err != NULL) {
6733  *err = result;
6734  }
6735  if (result != CL_SUCCESS) {
6736  devInfo.clear();
6737  }
6738  return devInfo;
6739  }
6740 
6741  cl_int createKernels(vector<Kernel>* kernels)
6742  {
6743  cl_uint numKernels;
6744  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6745  if (err != CL_SUCCESS) {
6746  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6747  }
6748 
6749  vector<cl_kernel> value(numKernels);
6750 
6751  err = ::clCreateKernelsInProgram(
6752  object_, numKernels, value.data(), NULL);
6753  if (err != CL_SUCCESS) {
6754  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6755  }
6756 
6757  if (kernels) {
6758  kernels->resize(value.size());
6759 
6760  // Assign to param, constructing with retain behaviour
6761  // to correctly capture each underlying CL object
6762  for (size_type i = 0; i < value.size(); i++) {
6763  // We do not need to retain because this kernel is being created
6764  // by the runtime
6765  (*kernels)[i] = Kernel(value[i], false);
6766  }
6767  }
6768  return CL_SUCCESS;
6769  }
6770 
6771 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
6772 
6782  cl_int setReleaseCallback(
6783  void (CL_CALLBACK * pfn_notify)(cl_program program, void * user_data),
6784  void * user_data = NULL)
6785  {
6786  return detail::errHandler(
6787  ::clSetProgramReleaseCallback(
6788  object_,
6789  pfn_notify,
6790  user_data),
6791  __SET_PROGRAM_RELEASE_CALLBACK_ERR);
6792  }
6793 
6798  template <typename T>
6799  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
6800  setSpecializationConstant(cl_uint index, const T &value)
6801  {
6802  return detail::errHandler(
6803  ::clSetProgramSpecializationConstant(
6804  object_,
6805  index,
6806  sizeof(value),
6807  &value),
6808  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6809  }
6810 
6815  cl_int setSpecializationConstant(cl_uint index, size_type size, const void* value)
6816  {
6817  return detail::errHandler(
6818  ::clSetProgramSpecializationConstant(
6819  object_,
6820  index,
6821  size,
6822  value),
6823  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6824  }
6825 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
6826 };
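// Illustrative usage (comment only), a sketch of building a program from source and
// retrieving the build log on failure; `context`, `device`, and `kernelSource` are
// assumed to exist in the caller.
//
//   cl::Program program(context, kernelSource);
//   if (program.build("-cl-std=CL1.2") != CL_SUCCESS) {
//       auto log = program.getBuildInfo<CL_PROGRAM_BUILD_LOG>(device);
//   }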
6827 
6828 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6829 inline Program linkProgram(
6830  Program input1,
6831  Program input2,
6832  const char* options = NULL,
6833  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6834  void* data = NULL,
6835  cl_int* err = NULL)
6836 {
6837  cl_int error_local = CL_SUCCESS;
6838 
6839  cl_program programs[2] = { input1(), input2() };
6840 
6841  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6842  if(error_local!=CL_SUCCESS) {
6843  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6844  }
6845 
6846  cl_program prog = ::clLinkProgram(
6847  ctx(),
6848  0,
6849  NULL,
6850  options,
6851  2,
6852  programs,
6853  notifyFptr,
6854  data,
6855  &error_local);
6856 
6857  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6858  if (err != NULL) {
6859  *err = error_local;
6860  }
6861 
6862  return Program(prog);
6863 }
6864 
6865 inline Program linkProgram(
6866  vector<Program> inputPrograms,
6867  const char* options = NULL,
6868  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6869  void* data = NULL,
6870  cl_int* err = NULL)
6871 {
6872  cl_int error_local = CL_SUCCESS;
6873 
6874  vector<cl_program> programs(inputPrograms.size());
6875 
6876  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6877  programs[i] = inputPrograms[i]();
6878  }
6879 
6880  Context ctx;
6881  if(inputPrograms.size() > 0) {
6882  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6883  if(error_local!=CL_SUCCESS) {
6884  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6885  }
6886  }
6887  cl_program prog = ::clLinkProgram(
6888  ctx(),
6889  0,
6890  NULL,
6891  options,
6892  (cl_uint)inputPrograms.size(),
6893  programs.data(),
6894  notifyFptr,
6895  data,
6896  &error_local);
6897 
6898  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6899  if (err != NULL) {
6900  *err = error_local;
6901  }
6902 
6903  return Program(prog, false);
6904 }
6905 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
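// Illustrative usage (comment only), a sketch of separate compilation followed by
// linking with the helpers above; the source strings srcA and srcB are placeholders.
//
//   cl::Program objA(context, srcA);  objA.compile();
//   cl::Program objB(context, srcB);  objB.compile();
//   cl_int linkErr;
//   cl::Program linked = cl::linkProgram(objA, objB, nullptr /*options*/,
//                                        nullptr /*notify*/, nullptr /*data*/, &linkErr);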
6906 
6907 // Template specialization for CL_PROGRAM_BINARIES
6908 template <>
6909 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6910 {
6911  if (name != CL_PROGRAM_BINARIES) {
6912  return CL_INVALID_VALUE;
6913  }
6914  if (param) {
6915  // Resize the parameter array appropriately for each allocation
6916  // and pass down to the helper
6917 
6918  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6919  size_type numBinaries = sizes.size();
6920 
6921  // Resize the parameter array and constituent arrays
6922  param->resize(numBinaries);
6923  for (size_type i = 0; i < numBinaries; ++i) {
6924  (*param)[i].resize(sizes[i]);
6925  }
6926 
6927  return detail::errHandler(
6928  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6929  __GET_PROGRAM_INFO_ERR);
6930  }
6931 
6932  return CL_SUCCESS;
6933 }
6934 
6935 template<>
6936 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6937 {
6938  vector<vector<unsigned char>> binariesVectors;
6939 
6940  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6941  if (err != NULL) {
6942  *err = result;
6943  }
6944  return binariesVectors;
6945 }
6946 
6947 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
6948 // Template specialization for clSetProgramSpecializationConstant
6949 template <>
6950 inline cl_int cl::Program::setSpecializationConstant(cl_uint index, const bool &value)
6951 {
6952  cl_uchar ucValue = value ? CL_UCHAR_MAX : 0;
6953  return detail::errHandler(
6954  ::clSetProgramSpecializationConstant(
6955  object_,
6956  index,
6957  sizeof(ucValue),
6958  &ucValue),
6959  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6960 }
6961 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
6962 
6963 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6964 {
6965  cl_int error;
6966 
6967  object_ = ::clCreateKernel(program(), name, &error);
6968  detail::errHandler(error, __CREATE_KERNEL_ERR);
6969 
6970  if (err != NULL) {
6971  *err = error;
6972  }
6973 
6974 }
6975 
6976 enum class QueueProperties : cl_command_queue_properties
6977 {
6978  None = 0,
6979  Profiling = CL_QUEUE_PROFILING_ENABLE,
6980  OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
6981 };
6982 
6983 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6984 {
6985  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6986 }
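// Illustrative usage (comment only), a sketch of combining QueueProperties flags when
// creating a command queue; `context` and `device` are assumed to exist.
//
//   cl::CommandQueue q(context, device,
//                      cl::QueueProperties::Profiling | cl::QueueProperties::OutOfOrder);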
6987 
6991 class CommandQueue : public detail::Wrapper<cl_command_queue>
6992 {
6993 private:
6994  static std::once_flag default_initialized_;
6995  static CommandQueue default_;
6996  static cl_int default_error_;
6997 
7003  static void makeDefault()
7004  {
7005  /* We don't want to throw an error from this function, so we have to
7006  * catch and set the error flag.
7007  */
7008 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
7009  try
7010 #endif
7011  {
7012  int error;
7013  Context context = Context::getDefault(&error);
7014 
7015  if (error != CL_SUCCESS) {
7016  default_error_ = error;
7017  }
7018  else {
7019  Device device = Device::getDefault();
7020  default_ = CommandQueue(context, device, 0, &default_error_);
7021  }
7022  }
7023 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
7024  catch (cl::Error &e) {
7025  default_error_ = e.err();
7026  }
7027 #endif
7028  }
7029 
7035  static void makeDefaultProvided(const CommandQueue &c) {
7036  default_ = c;
7037  }
7038 
7039 public:
7040 #ifdef CL_HPP_UNIT_TEST_ENABLE
7041 
7047  static void unitTestClearDefault() {
7048  default_ = CommandQueue();
7049  }
7050 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
7051 
7052 
7057  CommandQueue(
7058  cl_command_queue_properties properties,
7059  cl_int* err = NULL)
7060  {
7061  cl_int error;
7062 
7063  Context context = Context::getDefault(&error);
7064  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7065 
7066  if (error != CL_SUCCESS) {
7067  if (err != NULL) {
7068  *err = error;
7069  }
7070  }
7071  else {
7072  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
7073  bool useWithProperties;
7074 
7075 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7076  // Run-time decision based on the actual platform
7077  {
7078  cl_uint version = detail::getContextPlatformVersion(context());
7079  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7080  }
7081 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7082  useWithProperties = true;
7083 #else
7084  useWithProperties = false;
7085 #endif
7086 
7087 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7088  if (useWithProperties) {
7089  cl_queue_properties queue_properties[] = {
7090  CL_QUEUE_PROPERTIES, properties, 0 };
7091  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
7092  object_ = ::clCreateCommandQueueWithProperties(
7093  context(), device(), queue_properties, &error);
7094  }
7095  else {
7096  error = CL_INVALID_QUEUE_PROPERTIES;
7097  }
7098 
7099  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7100  if (err != NULL) {
7101  *err = error;
7102  }
7103  }
7104 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7105 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7106  if (!useWithProperties) {
7107  object_ = ::clCreateCommandQueue(
7108  context(), device(), properties, &error);
7109 
7110  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7111  if (err != NULL) {
7112  *err = error;
7113  }
7114  }
7115 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7116  }
7117  }
7118 
7123  CommandQueue(
7124  QueueProperties properties,
7125  cl_int* err = NULL)
7126  {
7127  cl_int error;
7128 
7129  Context context = Context::getDefault(&error);
7130  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7131 
7132  if (error != CL_SUCCESS) {
7133  if (err != NULL) {
7134  *err = error;
7135  }
7136  }
7137  else {
7138  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
7139  bool useWithProperties;
7140 
7141 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7142  // Run-time decision based on the actual platform
7143  {
7144  cl_uint version = detail::getContextPlatformVersion(context());
7145  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7146  }
7147 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7148  useWithProperties = true;
7149 #else
7150  useWithProperties = false;
7151 #endif
7152 
7153 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7154  if (useWithProperties) {
7155  cl_queue_properties queue_properties[] = {
7156  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7157 
7158  object_ = ::clCreateCommandQueueWithProperties(
7159  context(), device(), queue_properties, &error);
7160 
7161  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7162  if (err != NULL) {
7163  *err = error;
7164  }
7165  }
7166 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7167 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7168  if (!useWithProperties) {
7169  object_ = ::clCreateCommandQueue(
7170  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7171 
7172  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7173  if (err != NULL) {
7174  *err = error;
7175  }
7176  }
7177 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7178 
7179  }
7180  }
7181 
7186  explicit CommandQueue(
7187  const Context& context,
7188  cl_command_queue_properties properties = 0,
7189  cl_int* err = NULL)
7190  {
7191  cl_int error;
7192  bool useWithProperties;
7193  vector<cl::Device> devices;
7194  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7195 
7196  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7197 
7198  if (error != CL_SUCCESS)
7199  {
7200  if (err != NULL) {
7201  *err = error;
7202  }
7203  return;
7204  }
7205 
7206 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7207  // Run-time decision based on the actual platform
7208  {
7209  cl_uint version = detail::getContextPlatformVersion(context());
7210  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7211  }
7212 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7213  useWithProperties = true;
7214 #else
7215  useWithProperties = false;
7216 #endif
7217 
7218 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7219  if (useWithProperties) {
7220  cl_queue_properties queue_properties[] = {
7221  CL_QUEUE_PROPERTIES, properties, 0 };
7222  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
7223  object_ = ::clCreateCommandQueueWithProperties(
7224  context(), devices[0](), queue_properties, &error);
7225  }
7226  else {
7227  error = CL_INVALID_QUEUE_PROPERTIES;
7228  }
7229 
7230  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7231  if (err != NULL) {
7232  *err = error;
7233  }
7234  }
7235 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7236 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7237  if (!useWithProperties) {
7238  object_ = ::clCreateCommandQueue(
7239  context(), devices[0](), properties, &error);
7240 
7241  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7242  if (err != NULL) {
7243  *err = error;
7244  }
7245  }
7246 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7247  }
7248 
7253  explicit CommandQueue(
7254  const Context& context,
7255  QueueProperties properties,
7256  cl_int* err = NULL)
7257  {
7258  cl_int error;
7259  bool useWithProperties;
7260  vector<cl::Device> devices;
7261  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7262 
7263  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7264 
7265  if (error != CL_SUCCESS)
7266  {
7267  if (err != NULL) {
7268  *err = error;
7269  }
7270  return;
7271  }
7272 
7273 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7274  // Run-time decision based on the actual platform
7275  {
7276  cl_uint version = detail::getContextPlatformVersion(context());
7277  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7278  }
7279 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7280  useWithProperties = true;
7281 #else
7282  useWithProperties = false;
7283 #endif
7284 
7285 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7286  if (useWithProperties) {
7287  cl_queue_properties queue_properties[] = {
7288  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7289  object_ = ::clCreateCommandQueueWithProperties(
7290  context(), devices[0](), queue_properties, &error);
7291 
7292  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7293  if (err != NULL) {
7294  *err = error;
7295  }
7296  }
7297 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7298 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7299  if (!useWithProperties) {
7300  object_ = ::clCreateCommandQueue(
7301  context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
7302 
7303  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7304  if (err != NULL) {
7305  *err = error;
7306  }
7307  }
7308 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7309  }
7310 
7315  CommandQueue(
7316  const Context& context,
7317  const Device& device,
7318  cl_command_queue_properties properties = 0,
7319  cl_int* err = NULL)
7320  {
7321  cl_int error;
7322  bool useWithProperties;
7323 
7324 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7325  // Run-time decision based on the actual platform
7326  {
7327  cl_uint version = detail::getContextPlatformVersion(context());
7328  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7329  }
7330 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7331  useWithProperties = true;
7332 #else
7333  useWithProperties = false;
7334 #endif
7335 
7336 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7337  if (useWithProperties) {
7338  cl_queue_properties queue_properties[] = {
7339  CL_QUEUE_PROPERTIES, properties, 0 };
7340  object_ = ::clCreateCommandQueueWithProperties(
7341  context(), device(), queue_properties, &error);
7342 
7343  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7344  if (err != NULL) {
7345  *err = error;
7346  }
7347  }
7348 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7349 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7350  if (!useWithProperties) {
7351  object_ = ::clCreateCommandQueue(
7352  context(), device(), properties, &error);
7353 
7354  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7355  if (err != NULL) {
7356  *err = error;
7357  }
7358  }
7359 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7360  }
7361 
7366  CommandQueue(
7367  const Context& context,
7368  const Device& device,
7369  QueueProperties properties,
7370  cl_int* err = NULL)
7371  {
7372  cl_int error;
7373  bool useWithProperties;
7374 
7375 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7376  // Run-time decision based on the actual platform
7377  {
7378  cl_uint version = detail::getContextPlatformVersion(context());
7379  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7380  }
7381 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7382  useWithProperties = true;
7383 #else
7384  useWithProperties = false;
7385 #endif
7386 
7387 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7388  if (useWithProperties) {
7389  cl_queue_properties queue_properties[] = {
7390  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7391  object_ = ::clCreateCommandQueueWithProperties(
7392  context(), device(), queue_properties, &error);
7393 
7394  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7395  if (err != NULL) {
7396  *err = error;
7397  }
7398  }
7399 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7400 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7401  if (!useWithProperties) {
7402  object_ = ::clCreateCommandQueue(
7403  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7404 
7405  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7406  if (err != NULL) {
7407  *err = error;
7408  }
7409  }
7410 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7411  }
7412 
7413  static CommandQueue getDefault(cl_int * err = NULL)
7414  {
7415  std::call_once(default_initialized_, makeDefault);
7416 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7417  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7418 #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
7419  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
7420 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7421  if (err != NULL) {
7422  *err = default_error_;
7423  }
7424  return default_;
7425  }
7426 
7434  static CommandQueue setDefault(const CommandQueue &default_queue)
7435  {
7436  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
7437  detail::errHandler(default_error_);
7438  return default_;
7439  }
7440 
7441  CommandQueue() { }
7442 
7443 
7450  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
7451  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
7452 
7453  CommandQueue& operator = (const cl_command_queue& rhs)
7454  {
7455  detail::Wrapper<cl_type>::operator=(rhs);
7456  return *this;
7457  }
7458 
7462  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
7463 
7467  CommandQueue& operator = (const CommandQueue &queue)
7468  {
7469  detail::Wrapper<cl_type>::operator=(queue);
7470  return *this;
7471  }
7472 
7476  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
7477 
7481  CommandQueue& operator = (CommandQueue &&queue)
7482  {
7483  detail::Wrapper<cl_type>::operator=(std::move(queue));
7484  return *this;
7485  }
7486 
7487  template <typename T>
7488  cl_int getInfo(cl_command_queue_info name, T* param) const
7489  {
7490  return detail::errHandler(
7491  detail::getInfo(
7492  &::clGetCommandQueueInfo, object_, name, param),
7493  __GET_COMMAND_QUEUE_INFO_ERR);
7494  }
7495 
7496  template <cl_command_queue_info name> typename
7497  detail::param_traits<detail::cl_command_queue_info, name>::param_type
7498  getInfo(cl_int* err = NULL) const
7499  {
7500  typename detail::param_traits<
7501  detail::cl_command_queue_info, name>::param_type param;
7502  cl_int result = getInfo(name, &param);
7503  if (err != NULL) {
7504  *err = result;
7505  }
7506  return param;
7507  }
7508 
7509  cl_int enqueueReadBuffer(
7510  const Buffer& buffer,
7511  cl_bool blocking,
7512  size_type offset,
7513  size_type size,
7514  void* ptr,
7515  const vector<Event>* events = NULL,
7516  Event* event = NULL) const
7517  {
7518  cl_event tmp;
7519  cl_int err = detail::errHandler(
7520  ::clEnqueueReadBuffer(
7521  object_, buffer(), blocking, offset, size,
7522  ptr,
7523  (events != NULL) ? (cl_uint) events->size() : 0,
7524  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7525  (event != NULL) ? &tmp : NULL),
7526  __ENQUEUE_READ_BUFFER_ERR);
7527 
7528  if (event != NULL && err == CL_SUCCESS)
7529  *event = tmp;
7530 
7531  return err;
7532  }
7533 
7534  cl_int enqueueWriteBuffer(
7535  const Buffer& buffer,
7536  cl_bool blocking,
7537  size_type offset,
7538  size_type size,
7539  const void* ptr,
7540  const vector<Event>* events = NULL,
7541  Event* event = NULL) const
7542  {
7543  cl_event tmp;
7544  cl_int err = detail::errHandler(
7545  ::clEnqueueWriteBuffer(
7546  object_, buffer(), blocking, offset, size,
7547  ptr,
7548  (events != NULL) ? (cl_uint) events->size() : 0,
7549  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7550  (event != NULL) ? &tmp : NULL),
7551  __ENQUEUE_WRITE_BUFFER_ERR);
7552 
7553  if (event != NULL && err == CL_SUCCESS)
7554  *event = tmp;
7555 
7556  return err;
7557  }
7558 
7559  cl_int enqueueCopyBuffer(
7560  const Buffer& src,
7561  const Buffer& dst,
7562  size_type src_offset,
7563  size_type dst_offset,
7564  size_type size,
7565  const vector<Event>* events = NULL,
7566  Event* event = NULL) const
7567  {
7568  cl_event tmp;
7569  cl_int err = detail::errHandler(
7570  ::clEnqueueCopyBuffer(
7571  object_, src(), dst(), src_offset, dst_offset, size,
7572  (events != NULL) ? (cl_uint) events->size() : 0,
7573  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7574  (event != NULL) ? &tmp : NULL),
7575  __ENQEUE_COPY_BUFFER_ERR);
7576 
7577  if (event != NULL && err == CL_SUCCESS)
7578  *event = tmp;
7579 
7580  return err;
7581  }
7582 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
7583  cl_int enqueueReadBufferRect(
7584  const Buffer& buffer,
7585  cl_bool blocking,
7586  const array<size_type, 3>& buffer_offset,
7587  const array<size_type, 3>& host_offset,
7588  const array<size_type, 3>& region,
7589  size_type buffer_row_pitch,
7590  size_type buffer_slice_pitch,
7591  size_type host_row_pitch,
7592  size_type host_slice_pitch,
7593  void *ptr,
7594  const vector<Event>* events = NULL,
7595  Event* event = NULL) const
7596  {
7597  cl_event tmp;
7598  cl_int err = detail::errHandler(
7599  ::clEnqueueReadBufferRect(
7600  object_,
7601  buffer(),
7602  blocking,
7603  buffer_offset.data(),
7604  host_offset.data(),
7605  region.data(),
7606  buffer_row_pitch,
7607  buffer_slice_pitch,
7608  host_row_pitch,
7609  host_slice_pitch,
7610  ptr,
7611  (events != NULL) ? (cl_uint) events->size() : 0,
7612  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7613  (event != NULL) ? &tmp : NULL),
7614  __ENQUEUE_READ_BUFFER_RECT_ERR);
7615 
7616  if (event != NULL && err == CL_SUCCESS)
7617  *event = tmp;
7618 
7619  return err;
7620  }
7621 
7622  cl_int enqueueWriteBufferRect(
7623  const Buffer& buffer,
7624  cl_bool blocking,
7625  const array<size_type, 3>& buffer_offset,
7626  const array<size_type, 3>& host_offset,
7627  const array<size_type, 3>& region,
7628  size_type buffer_row_pitch,
7629  size_type buffer_slice_pitch,
7630  size_type host_row_pitch,
7631  size_type host_slice_pitch,
7632  const void *ptr,
7633  const vector<Event>* events = NULL,
7634  Event* event = NULL) const
7635  {
7636  cl_event tmp;
7637  cl_int err = detail::errHandler(
7638  ::clEnqueueWriteBufferRect(
7639  object_,
7640  buffer(),
7641  blocking,
7642  buffer_offset.data(),
7643  host_offset.data(),
7644  region.data(),
7645  buffer_row_pitch,
7646  buffer_slice_pitch,
7647  host_row_pitch,
7648  host_slice_pitch,
7649  ptr,
7650  (events != NULL) ? (cl_uint) events->size() : 0,
7651  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7652  (event != NULL) ? &tmp : NULL),
7653  __ENQUEUE_WRITE_BUFFER_RECT_ERR);
7654 
7655  if (event != NULL && err == CL_SUCCESS)
7656  *event = tmp;
7657 
7658  return err;
7659  }
7660 
7661  cl_int enqueueCopyBufferRect(
7662  const Buffer& src,
7663  const Buffer& dst,
7664  const array<size_type, 3>& src_origin,
7665  const array<size_type, 3>& dst_origin,
7666  const array<size_type, 3>& region,
7667  size_type src_row_pitch,
7668  size_type src_slice_pitch,
7669  size_type dst_row_pitch,
7670  size_type dst_slice_pitch,
7671  const vector<Event>* events = NULL,
7672  Event* event = NULL) const
7673  {
7674  cl_event tmp;
7675  cl_int err = detail::errHandler(
7676  ::clEnqueueCopyBufferRect(
7677  object_,
7678  src(),
7679  dst(),
7680  src_origin.data(),
7681  dst_origin.data(),
7682  region.data(),
7683  src_row_pitch,
7684  src_slice_pitch,
7685  dst_row_pitch,
7686  dst_slice_pitch,
7687  (events != NULL) ? (cl_uint) events->size() : 0,
7688  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7689  (event != NULL) ? &tmp : NULL),
7690  __ENQEUE_COPY_BUFFER_RECT_ERR);
7691 
7692  if (event != NULL && err == CL_SUCCESS)
7693  *event = tmp;
7694 
7695  return err;
7696  }
7697 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
7698 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7699 
7710  template<typename PatternType>
7711  cl_int enqueueFillBuffer(
7712  const Buffer& buffer,
7713  PatternType pattern,
7714  size_type offset,
7715  size_type size,
7716  const vector<Event>* events = NULL,
7717  Event* event = NULL) const
7718  {
7719  cl_event tmp;
7720  cl_int err = detail::errHandler(
7721  ::clEnqueueFillBuffer(
7722  object_,
7723  buffer(),
7724  static_cast<void*>(&pattern),
7725  sizeof(PatternType),
7726  offset,
7727  size,
7728  (events != NULL) ? (cl_uint) events->size() : 0,
7729  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7730  (event != NULL) ? &tmp : NULL),
7731  __ENQUEUE_FILL_BUFFER_ERR);
7732 
7733  if (event != NULL && err == CL_SUCCESS)
7734  *event = tmp;
7735 
7736  return err;
7737  }
7738 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7739 
7740  cl_int enqueueReadImage(
7741  const Image& image,
7742  cl_bool blocking,
7743  const array<size_type, 3>& origin,
7744  const array<size_type, 3>& region,
7745  size_type row_pitch,
7746  size_type slice_pitch,
7747  void* ptr,
7748  const vector<Event>* events = NULL,
7749  Event* event = NULL) const
7750  {
7751  cl_event tmp;
7752  cl_int err = detail::errHandler(
7753  ::clEnqueueReadImage(
7754  object_,
7755  image(),
7756  blocking,
7757  origin.data(),
7758  region.data(),
7759  row_pitch,
7760  slice_pitch,
7761  ptr,
7762  (events != NULL) ? (cl_uint) events->size() : 0,
7763  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7764  (event != NULL) ? &tmp : NULL),
7765  __ENQUEUE_READ_IMAGE_ERR);
7766 
7767  if (event != NULL && err == CL_SUCCESS)
7768  *event = tmp;
7769 
7770  return err;
7771  }
7772 
7773  cl_int enqueueWriteImage(
7774  const Image& image,
7775  cl_bool blocking,
7776  const array<size_type, 3>& origin,
7777  const array<size_type, 3>& region,
7778  size_type row_pitch,
7779  size_type slice_pitch,
7780  const void* ptr,
7781  const vector<Event>* events = NULL,
7782  Event* event = NULL) const
7783  {
7784  cl_event tmp;
7785  cl_int err = detail::errHandler(
7786  ::clEnqueueWriteImage(
7787  object_,
7788  image(),
7789  blocking,
7790  origin.data(),
7791  region.data(),
7792  row_pitch,
7793  slice_pitch,
7794  ptr,
7795  (events != NULL) ? (cl_uint) events->size() : 0,
7796  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7797  (event != NULL) ? &tmp : NULL),
7798  __ENQUEUE_WRITE_IMAGE_ERR);
7799 
7800  if (event != NULL && err == CL_SUCCESS)
7801  *event = tmp;
7802 
7803  return err;
7804  }
7805 
7806  cl_int enqueueCopyImage(
7807  const Image& src,
7808  const Image& dst,
7809  const array<size_type, 3>& src_origin,
7810  const array<size_type, 3>& dst_origin,
7811  const array<size_type, 3>& region,
7812  const vector<Event>* events = NULL,
7813  Event* event = NULL) const
7814  {
7815  cl_event tmp;
7816  cl_int err = detail::errHandler(
7817  ::clEnqueueCopyImage(
7818  object_,
7819  src(),
7820  dst(),
7821  src_origin.data(),
7822  dst_origin.data(),
7823  region.data(),
7824  (events != NULL) ? (cl_uint) events->size() : 0,
7825  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7826  (event != NULL) ? &tmp : NULL),
7827  __ENQUEUE_COPY_IMAGE_ERR);
7828 
7829  if (event != NULL && err == CL_SUCCESS)
7830  *event = tmp;
7831 
7832  return err;
7833  }
7834 
7835 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7836 
7843  cl_int enqueueFillImage(
7844  const Image& image,
7845  cl_float4 fillColor,
7846  const array<size_type, 3>& origin,
7847  const array<size_type, 3>& region,
7848  const vector<Event>* events = NULL,
7849  Event* event = NULL) const
7850  {
7851  cl_event tmp;
7852  cl_int err = detail::errHandler(
7853  ::clEnqueueFillImage(
7854  object_,
7855  image(),
7856  static_cast<void*>(&fillColor),
7857  origin.data(),
7858  region.data(),
7859  (events != NULL) ? (cl_uint) events->size() : 0,
7860  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7861  (event != NULL) ? &tmp : NULL),
7862  __ENQUEUE_FILL_IMAGE_ERR);
7863 
7864  if (event != NULL && err == CL_SUCCESS)
7865  *event = tmp;
7866 
7867  return err;
7868  }
7869 
7877  cl_int enqueueFillImage(
7878  const Image& image,
7879  cl_int4 fillColor,
7880  const array<size_type, 3>& origin,
7881  const array<size_type, 3>& region,
7882  const vector<Event>* events = NULL,
7883  Event* event = NULL) const
7884  {
7885  cl_event tmp;
7886  cl_int err = detail::errHandler(
7887  ::clEnqueueFillImage(
7888  object_,
7889  image(),
7890  static_cast<void*>(&fillColor),
7891  origin.data(),
7892  region.data(),
7893  (events != NULL) ? (cl_uint) events->size() : 0,
7894  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7895  (event != NULL) ? &tmp : NULL),
7896  __ENQUEUE_FILL_IMAGE_ERR);
7897 
7898  if (event != NULL && err == CL_SUCCESS)
7899  *event = tmp;
7900 
7901  return err;
7902  }
7903 
7911  cl_int enqueueFillImage(
7912  const Image& image,
7913  cl_uint4 fillColor,
7914  const array<size_type, 3>& origin,
7915  const array<size_type, 3>& region,
7916  const vector<Event>* events = NULL,
7917  Event* event = NULL) const
7918  {
7919  cl_event tmp;
7920  cl_int err = detail::errHandler(
7921  ::clEnqueueFillImage(
7922  object_,
7923  image(),
7924  static_cast<void*>(&fillColor),
7925  origin.data(),
7926  region.data(),
7927  (events != NULL) ? (cl_uint) events->size() : 0,
7928  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7929  (event != NULL) ? &tmp : NULL),
7930  __ENQUEUE_FILL_IMAGE_ERR);
7931 
7932  if (event != NULL && err == CL_SUCCESS)
7933  *event = tmp;
7934 
7935  return err;
7936  }
7937 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7938 
7939  cl_int enqueueCopyImageToBuffer(
7940  const Image& src,
7941  const Buffer& dst,
7942  const array<size_type, 3>& src_origin,
7943  const array<size_type, 3>& region,
7944  size_type dst_offset,
7945  const vector<Event>* events = NULL,
7946  Event* event = NULL) const
7947  {
7948  cl_event tmp;
7949  cl_int err = detail::errHandler(
7950  ::clEnqueueCopyImageToBuffer(
7951  object_,
7952  src(),
7953  dst(),
7954  src_origin.data(),
7955  region.data(),
7956  dst_offset,
7957  (events != NULL) ? (cl_uint) events->size() : 0,
7958  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7959  (event != NULL) ? &tmp : NULL),
7960  __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
7961 
7962  if (event != NULL && err == CL_SUCCESS)
7963  *event = tmp;
7964 
7965  return err;
7966  }
7967 
7968  cl_int enqueueCopyBufferToImage(
7969  const Buffer& src,
7970  const Image& dst,
7971  size_type src_offset,
7972  const array<size_type, 3>& dst_origin,
7973  const array<size_type, 3>& region,
7974  const vector<Event>* events = NULL,
7975  Event* event = NULL) const
7976  {
7977  cl_event tmp;
7978  cl_int err = detail::errHandler(
7979  ::clEnqueueCopyBufferToImage(
7980  object_,
7981  src(),
7982  dst(),
7983  src_offset,
7984  dst_origin.data(),
7985  region.data(),
7986  (events != NULL) ? (cl_uint) events->size() : 0,
7987  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7988  (event != NULL) ? &tmp : NULL),
7989  __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
7990 
7991  if (event != NULL && err == CL_SUCCESS)
7992  *event = tmp;
7993 
7994  return err;
7995  }
7996 
7997  void* enqueueMapBuffer(
7998  const Buffer& buffer,
7999  cl_bool blocking,
8000  cl_map_flags flags,
8001  size_type offset,
8002  size_type size,
8003  const vector<Event>* events = NULL,
8004  Event* event = NULL,
8005  cl_int* err = NULL) const
8006  {
8007  cl_event tmp;
8008  cl_int error;
8009  void * result = ::clEnqueueMapBuffer(
8010  object_, buffer(), blocking, flags, offset, size,
8011  (events != NULL) ? (cl_uint) events->size() : 0,
8012  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8013  (event != NULL) ? &tmp : NULL,
8014  &error);
8015 
8016  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8017  if (err != NULL) {
8018  *err = error;
8019  }
8020  if (event != NULL && error == CL_SUCCESS)
8021  *event = tmp;
8022 
8023  return result;
8024  }
8025 
8026  void* enqueueMapImage(
8027  const Image& buffer,
8028  cl_bool blocking,
8029  cl_map_flags flags,
8030  const array<size_type, 3>& origin,
8031  const array<size_type, 3>& region,
8032  size_type * row_pitch,
8033  size_type * slice_pitch,
8034  const vector<Event>* events = NULL,
8035  Event* event = NULL,
8036  cl_int* err = NULL) const
8037  {
8038  cl_event tmp;
8039  cl_int error;
8040  void * result = ::clEnqueueMapImage(
8041  object_, buffer(), blocking, flags,
8042  origin.data(),
8043  region.data(),
8044  row_pitch, slice_pitch,
8045  (events != NULL) ? (cl_uint) events->size() : 0,
8046  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8047  (event != NULL) ? &tmp : NULL,
8048  &error);
8049 
8050  detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
8051  if (err != NULL) {
8052  *err = error;
8053  }
8054  if (event != NULL && error == CL_SUCCESS)
8055  *event = tmp;
8056  return result;
8057  }
8058 
8059 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8060 
8064  template<typename T>
8065  cl_int enqueueMapSVM(
8066  T* ptr,
8067  cl_bool blocking,
8068  cl_map_flags flags,
8069  size_type size,
8070  const vector<Event>* events = NULL,
8071  Event* event = NULL) const
8072  {
8073  cl_event tmp;
8074  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8075  object_, blocking, flags, static_cast<void*>(ptr), size,
8076  (events != NULL) ? (cl_uint)events->size() : 0,
8077  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8078  (event != NULL) ? &tmp : NULL),
8079  __ENQUEUE_MAP_BUFFER_ERR);
8080 
8081  if (event != NULL && err == CL_SUCCESS)
8082  *event = tmp;
8083 
8084  return err;
8085  }
8086 
8087 
8092  template<typename T, class D>
8093  cl_int enqueueMapSVM(
8094  cl::pointer<T, D> &ptr,
8095  cl_bool blocking,
8096  cl_map_flags flags,
8097  size_type size,
8098  const vector<Event>* events = NULL,
8099  Event* event = NULL) const
8100  {
8101  cl_event tmp;
8102  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8103  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
8104  (events != NULL) ? (cl_uint)events->size() : 0,
8105  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8106  (event != NULL) ? &tmp : NULL),
8107  __ENQUEUE_MAP_BUFFER_ERR);
8108 
8109  if (event != NULL && err == CL_SUCCESS)
8110  *event = tmp;
8111 
8112  return err;
8113  }
8114 
8119  template<typename T, class Alloc>
8120  cl_int enqueueMapSVM(
8121  cl::vector<T, Alloc> &container,
8122  cl_bool blocking,
8123  cl_map_flags flags,
8124  const vector<Event>* events = NULL,
8125  Event* event = NULL) const
8126  {
8127  cl_event tmp;
8128  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8129  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
8130  (events != NULL) ? (cl_uint)events->size() : 0,
8131  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8132  (event != NULL) ? &tmp : NULL),
8133  __ENQUEUE_MAP_BUFFER_ERR);
8134 
8135  if (event != NULL && err == CL_SUCCESS)
8136  *event = tmp;
8137 
8138  return err;
8139  }
8140 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8141 
8142  cl_int enqueueUnmapMemObject(
8143  const Memory& memory,
8144  void* mapped_ptr,
8145  const vector<Event>* events = NULL,
8146  Event* event = NULL) const
8147  {
8148  cl_event tmp;
8149  cl_int err = detail::errHandler(
8150  ::clEnqueueUnmapMemObject(
8151  object_, memory(), mapped_ptr,
8152  (events != NULL) ? (cl_uint) events->size() : 0,
8153  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8154  (event != NULL) ? &tmp : NULL),
8155  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8156 
8157  if (event != NULL && err == CL_SUCCESS)
8158  *event = tmp;
8159 
8160  return err;
8161  }
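// --- Illustrative usage sketch (added commentary, not part of the original header) ---
// Mapping a buffer gives the host a pointer into the device allocation; the region
// must be unmapped before the device reuses it. The names `queue`, `buf` and `bytes`
// below are assumed to come from earlier host code.
//
//   cl_int err;
//   void* mapped = queue.enqueueMapBuffer(
//       buf, CL_TRUE /*blocking*/, CL_MAP_WRITE, 0 /*offset*/, bytes,
//       NULL, NULL, &err);
//   if (err == CL_SUCCESS) {
//       std::memset(mapped, 0, bytes);             // host-side fill while mapped
//       queue.enqueueUnmapMemObject(buf, mapped);  // return the region to the device
//       queue.finish();
//   }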
8162 
8163 
8164 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8165 
8169  template<typename T>
8170  cl_int enqueueUnmapSVM(
8171  T* ptr,
8172  const vector<Event>* events = NULL,
8173  Event* event = NULL) const
8174  {
8175  cl_event tmp;
8176  cl_int err = detail::errHandler(
8177  ::clEnqueueSVMUnmap(
8178  object_, static_cast<void*>(ptr),
8179  (events != NULL) ? (cl_uint)events->size() : 0,
8180  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8181  (event != NULL) ? &tmp : NULL),
8182  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8183 
8184  if (event != NULL && err == CL_SUCCESS)
8185  *event = tmp;
8186 
8187  return err;
8188  }
8189 
8194  template<typename T, class D>
8195  cl_int enqueueUnmapSVM(
8196  cl::pointer<T, D> &ptr,
8197  const vector<Event>* events = NULL,
8198  Event* event = NULL) const
8199  {
8200  cl_event tmp;
8201  cl_int err = detail::errHandler(
8202  ::clEnqueueSVMUnmap(
8203  object_, static_cast<void*>(ptr.get()),
8204  (events != NULL) ? (cl_uint)events->size() : 0,
8205  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8206  (event != NULL) ? &tmp : NULL),
8207  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8208 
8209  if (event != NULL && err == CL_SUCCESS)
8210  *event = tmp;
8211 
8212  return err;
8213  }
8214 
8219  template<typename T, class Alloc>
8220  cl_int enqueueUnmapSVM(
8221  cl::vector<T, Alloc> &container,
8222  const vector<Event>* events = NULL,
8223  Event* event = NULL) const
8224  {
8225  cl_event tmp;
8226  cl_int err = detail::errHandler(
8227  ::clEnqueueSVMUnmap(
8228  object_, static_cast<void*>(container.data()),
8229  (events != NULL) ? (cl_uint)events->size() : 0,
8230  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8231  (event != NULL) ? &tmp : NULL),
8232  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8233 
8234  if (event != NULL && err == CL_SUCCESS)
8235  *event = tmp;
8236 
8237  return err;
8238  }
8239 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8240 
8241 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8242 
8253  cl_int enqueueMarkerWithWaitList(
8254  const vector<Event> *events = 0,
8255  Event *event = 0) const
8256  {
8257  cl_event tmp;
8258  cl_int err = detail::errHandler(
8259  ::clEnqueueMarkerWithWaitList(
8260  object_,
8261  (events != NULL) ? (cl_uint) events->size() : 0,
8262  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8263  (event != NULL) ? &tmp : NULL),
8264  __ENQUEUE_MARKER_WAIT_LIST_ERR);
8265 
8266  if (event != NULL && err == CL_SUCCESS)
8267  *event = tmp;
8268 
8269  return err;
8270  }
8271 
8283  cl_int enqueueBarrierWithWaitList(
8284  const vector<Event> *events = 0,
8285  Event *event = 0) const
8286  {
8287  cl_event tmp;
8288  cl_int err = detail::errHandler(
8289  ::clEnqueueBarrierWithWaitList(
8290  object_,
8291  (events != NULL) ? (cl_uint) events->size() : 0,
8292  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8293  (event != NULL) ? &tmp : NULL),
8294  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
8295 
8296  if (event != NULL && err == CL_SUCCESS)
8297  *event = tmp;
8298 
8299  return err;
8300  }
8301 
8306  cl_int enqueueMigrateMemObjects(
8307  const vector<Memory> &memObjects,
8308  cl_mem_migration_flags flags,
8309  const vector<Event>* events = NULL,
8310  Event* event = NULL
8311  ) const
8312  {
8313  cl_event tmp;
8314 
8315  vector<cl_mem> localMemObjects(memObjects.size());
8316 
8317  for( int i = 0; i < (int)memObjects.size(); ++i ) {
8318  localMemObjects[i] = memObjects[i]();
8319  }
8320 
8321  cl_int err = detail::errHandler(
8322  ::clEnqueueMigrateMemObjects(
8323  object_,
8324  (cl_uint)memObjects.size(),
8325  localMemObjects.data(),
8326  flags,
8327  (events != NULL) ? (cl_uint) events->size() : 0,
8328  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8329  (event != NULL) ? &tmp : NULL),
8330  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8331 
8332  if (event != NULL && err == CL_SUCCESS)
8333  *event = tmp;
8334 
8335  return err;
8336  }
8337 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8338 
8339 
8340 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8341 
8346  template<typename T>
8347  cl_int enqueueMigrateSVM(
8348  const cl::vector<T*> &svmRawPointers,
8349  const cl::vector<size_type> &sizes,
8350  cl_mem_migration_flags flags = 0,
8351  const vector<Event>* events = NULL,
8352  Event* event = NULL) const
8353  {
8354  cl_event tmp;
8355  cl_int err = detail::errHandler(::clEnqueueSVMMigrateMem(
8356  object_,
8357  svmRawPointers.size(), static_cast<void**>(svmRawPointers.data()),
8358  sizes.data(), // array of sizes not passed
8359  flags,
8360  (events != NULL) ? (cl_uint)events->size() : 0,
8361  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8362  (event != NULL) ? &tmp : NULL),
8363  __ENQUEUE_MIGRATE_SVM_ERR);
8364 
8365  if (event != NULL && err == CL_SUCCESS)
8366  *event = tmp;
8367 
8368  return err;
8369  }
8370 
8375  template<typename T>
8376  cl_int enqueueMigrateSVM(
8377  const cl::vector<T*> &svmRawPointers,
8378  cl_mem_migration_flags flags = 0,
8379  const vector<Event>* events = NULL,
8380  Event* event = NULL) const
8381  {
8382  return enqueueMigrateSVM(svmRawPointers, cl::vector<size_type>(svmRawPointers.size()), flags, events, event);
8383  }
8384 
8385 
8391  template<typename T, class D>
8392  cl_int enqueueMigrateSVM(
8393  const cl::vector<cl::pointer<T, D>> &svmPointers,
8394  const cl::vector<size_type> &sizes,
8395  cl_mem_migration_flags flags = 0,
8396  const vector<Event>* events = NULL,
8397  Event* event = NULL) const
8398  {
8399  cl::vector<void*> svmRawPointers;
8400  svmRawPointers.reserve(svmPointers.size());
8401  for (auto p : svmPointers) {
8402  svmRawPointers.push_back(static_cast<void*>(p.get()));
8403  }
8404 
8405  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8406  }
8407 
8408 
8413  template<typename T, class D>
8414  cl_int enqueueMigrateSVM(
8415  const cl::vector<cl::pointer<T, D>> &svmPointers,
8416  cl_mem_migration_flags flags = 0,
8417  const vector<Event>* events = NULL,
8418  Event* event = NULL) const
8419  {
8420  return enqueueMigrateSVM(svmPointers, cl::vector<size_type>(svmPointers.size()), flags, events, event);
8421  }
8422 
8428  template<typename T, class Alloc>
8429  cl_int enqueueMigrateSVM(
8430  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8431  const cl::vector<size_type> &sizes,
8432  cl_mem_migration_flags flags = 0,
8433  const vector<Event>* events = NULL,
8434  Event* event = NULL) const
8435  {
8436  cl::vector<void*> svmRawPointers;
8437  svmRawPointers.reserve(svmContainers.size());
8438  for (auto p : svmContainers) {
8439  svmRawPointers.push_back(static_cast<void*>(p.data()));
8440  }
8441 
8442  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8443  }
8444 
8449  template<typename T, class Alloc>
8450  cl_int enqueueMigrateSVM(
8451  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8452  cl_mem_migration_flags flags = 0,
8453  const vector<Event>* events = NULL,
8454  Event* event = NULL) const
8455  {
8456  return enqueueMigrateSVM(svmContainers, cl::vector<size_type>(svmContainers.size()), flags, events, event);
8457  }
8458 
8459 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8460 
8461  cl_int enqueueNDRangeKernel(
8462  const Kernel& kernel,
8463  const NDRange& offset,
8464  const NDRange& global,
8465  const NDRange& local = NullRange,
8466  const vector<Event>* events = NULL,
8467  Event* event = NULL) const
8468  {
8469  cl_event tmp;
8470  cl_int err = detail::errHandler(
8471  ::clEnqueueNDRangeKernel(
8472  object_, kernel(), (cl_uint) global.dimensions(),
8473  offset.dimensions() != 0 ? (const size_type*) offset : NULL,
8474  (const size_type*) global,
8475  local.dimensions() != 0 ? (const size_type*) local : NULL,
8476  (events != NULL) ? (cl_uint) events->size() : 0,
8477  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8478  (event != NULL) ? &tmp : NULL),
8479  __ENQUEUE_NDRANGE_KERNEL_ERR);
8480 
8481  if (event != NULL && err == CL_SUCCESS)
8482  *event = tmp;
8483 
8484  return err;
8485  }
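// --- Illustrative usage sketch (added commentary, not part of the original header) ---
// A minimal 1-D launch through enqueueNDRangeKernel. The kernel name "vectorAdd"
// and the source string `kSource` are assumptions for the example only; kernel
// arguments would be set with setArg() before the launch.
//
//   // kSource: an OpenCL C source string defined elsewhere in host code (assumed)
//   cl::Program program(kSource, true /*build*/);
//   cl::Kernel vectorAdd(program, "vectorAdd");
//   cl::CommandQueue queue = cl::CommandQueue::getDefault();
//   cl::Event done;
//   queue.enqueueNDRangeKernel(
//       vectorAdd,
//       cl::NullRange,        // no global offset
//       cl::NDRange(1024),    // global work size
//       cl::NullRange,        // runtime chooses the work-group size
//       NULL, &done);
//   done.wait();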
8486 
8487 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8488  CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
8489  const Kernel& kernel,
8490  const vector<Event>* events = NULL,
8491  Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
8492  {
8493  cl_event tmp;
8494  cl_int err = detail::errHandler(
8495  ::clEnqueueTask(
8496  object_, kernel(),
8497  (events != NULL) ? (cl_uint) events->size() : 0,
8498  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8499  (event != NULL) ? &tmp : NULL),
8500  __ENQUEUE_TASK_ERR);
8501 
8502  if (event != NULL && err == CL_SUCCESS)
8503  *event = tmp;
8504 
8505  return err;
8506  }
8507 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8508 
8509  cl_int enqueueNativeKernel(
8510  void (CL_CALLBACK *userFptr)(void *),
8511  std::pair<void*, size_type> args,
8512  const vector<Memory>* mem_objects = NULL,
8513  const vector<const void*>* mem_locs = NULL,
8514  const vector<Event>* events = NULL,
8515  Event* event = NULL) const
8516  {
8517  size_type elements = 0;
8518  if (mem_objects != NULL) {
8519  elements = mem_objects->size();
8520  }
8521  vector<cl_mem> mems(elements);
8522  for (unsigned int i = 0; i < elements; i++) {
8523  mems[i] = ((*mem_objects)[i])();
8524  }
8525 
8526  cl_event tmp;
8527  cl_int err = detail::errHandler(
8528  ::clEnqueueNativeKernel(
8529  object_, userFptr, args.first, args.second,
8530  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8531  mems.data(),
8532  (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
8533  (events != NULL) ? (cl_uint) events->size() : 0,
8534  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8535  (event != NULL) ? &tmp : NULL),
8536  __ENQUEUE_NATIVE_KERNEL);
8537 
8538  if (event != NULL && err == CL_SUCCESS)
8539  *event = tmp;
8540 
8541  return err;
8542  }
8543 
8547 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8548  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8549  cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8550  {
8551  cl_event tmp;
8552  cl_int err = detail::errHandler(
8553  ::clEnqueueMarker(
8554  object_,
8555  (event != NULL) ? &tmp : NULL),
8556  __ENQUEUE_MARKER_ERR);
8557 
8558  if (event != NULL && err == CL_SUCCESS)
8559  *event = tmp;
8560 
8561  return err;
8562  }
8563 
8564  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8565  cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8566  {
8567  return detail::errHandler(
8568  ::clEnqueueWaitForEvents(
8569  object_,
8570  (cl_uint) events.size(),
8571  events.size() > 0 ? (const cl_event*) &events.front() : NULL),
8572  __ENQUEUE_WAIT_FOR_EVENTS_ERR);
8573  }
8574 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8575 
8576  cl_int enqueueAcquireGLObjects(
8577  const vector<Memory>* mem_objects = NULL,
8578  const vector<Event>* events = NULL,
8579  Event* event = NULL) const
8580  {
8581  cl_event tmp;
8582  cl_int err = detail::errHandler(
8583  ::clEnqueueAcquireGLObjects(
8584  object_,
8585  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8586  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8587  (events != NULL) ? (cl_uint) events->size() : 0,
8588  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8589  (event != NULL) ? &tmp : NULL),
8590  __ENQUEUE_ACQUIRE_GL_ERR);
8591 
8592  if (event != NULL && err == CL_SUCCESS)
8593  *event = tmp;
8594 
8595  return err;
8596  }
8597 
8598  cl_int enqueueReleaseGLObjects(
8599  const vector<Memory>* mem_objects = NULL,
8600  const vector<Event>* events = NULL,
8601  Event* event = NULL) const
8602  {
8603  cl_event tmp;
8604  cl_int err = detail::errHandler(
8605  ::clEnqueueReleaseGLObjects(
8606  object_,
8607  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8608  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8609  (events != NULL) ? (cl_uint) events->size() : 0,
8610  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8611  (event != NULL) ? &tmp : NULL),
8612  __ENQUEUE_RELEASE_GL_ERR);
8613 
8614  if (event != NULL && err == CL_SUCCESS)
8615  *event = tmp;
8616 
8617  return err;
8618  }
8619 
8620 #if defined (CL_HPP_USE_DX_INTEROP)
8621 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8622  cl_command_queue command_queue, cl_uint num_objects,
8623  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8624  const cl_event* event_wait_list, cl_event* event);
8625 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8626  cl_command_queue command_queue, cl_uint num_objects,
8627  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8628  const cl_event* event_wait_list, cl_event* event);
8629 
8630  cl_int enqueueAcquireD3D10Objects(
8631  const vector<Memory>* mem_objects = NULL,
8632  const vector<Event>* events = NULL,
8633  Event* event = NULL) const
8634  {
8635  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8636 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8637  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8638  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8639  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8640  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8641 #endif
8642 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8643  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8644 #endif
8645 
8646  cl_event tmp;
8647  cl_int err = detail::errHandler(
8648  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8649  object_,
8650  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8651  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8652  (events != NULL) ? (cl_uint) events->size() : 0,
8653  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8654  (event != NULL) ? &tmp : NULL),
8655  __ENQUEUE_ACQUIRE_GL_ERR);
8656 
8657  if (event != NULL && err == CL_SUCCESS)
8658  *event = tmp;
8659 
8660  return err;
8661  }
8662 
8663  cl_int enqueueReleaseD3D10Objects(
8664  const vector<Memory>* mem_objects = NULL,
8665  const vector<Event>* events = NULL,
8666  Event* event = NULL) const
8667  {
8668  static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
8669 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8670  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8671  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8672  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8673  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
8674 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8675 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8676  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
8677 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8678 
8679  cl_event tmp;
8680  cl_int err = detail::errHandler(
8681  pfn_clEnqueueReleaseD3D10ObjectsKHR(
8682  object_,
8683  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8684  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8685  (events != NULL) ? (cl_uint) events->size() : 0,
8686  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8687  (event != NULL) ? &tmp : NULL),
8688  __ENQUEUE_RELEASE_GL_ERR);
8689 
8690  if (event != NULL && err == CL_SUCCESS)
8691  *event = tmp;
8692 
8693  return err;
8694  }
8695 #endif
8696 
8700 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8701  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8702  cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8703  {
8704  return detail::errHandler(
8705  ::clEnqueueBarrier(object_),
8706  __ENQUEUE_BARRIER_ERR);
8707  }
8708 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8709 
8710  cl_int flush() const
8711  {
8712  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8713  }
8714 
8715  cl_int finish() const
8716  {
8717  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8718  }
8719 }; // CommandQueue
8720 
8721 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
8722 CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
8723 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
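// --- Illustrative usage sketch (added commentary, not part of the original header) ---
// Typical host-side round trip with a CommandQueue: create a queue on the default
// context, write host data to a buffer, then read it back with blocking calls.
//
//   cl::Context context = cl::Context::getDefault();
//   cl::CommandQueue queue(context, 0 /*in-order, no profiling*/);
//   std::vector<int> host(256, 42);
//   const size_t bytes = host.size() * sizeof(int);
//   cl::Buffer buf(context, CL_MEM_READ_WRITE, bytes);
//   queue.enqueueWriteBuffer(buf, CL_TRUE, 0, bytes, host.data());
//   queue.enqueueReadBuffer(buf, CL_TRUE, 0, bytes, host.data());
//   queue.finish();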
8724 
8725 
8726 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8727 enum class DeviceQueueProperties : cl_command_queue_properties
8728 {
8729  None = 0,
8730  Profiling = CL_QUEUE_PROFILING_ENABLE,
8731 };
8732 
8733 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8734 {
8735  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8736 }
8737 
8741 class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
8742 {
8743 public:
8744 
8748  DeviceCommandQueue() { }
8749 
8753  DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
8754  {
8755  cl_int error;
8756  cl::Context context = cl::Context::getDefault();
8757  cl::Device device = cl::Device::getDefault();
8758 
8759  cl_command_queue_properties mergedProperties =
8760  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8761 
8762  cl_queue_properties queue_properties[] = {
8763  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8764  object_ = ::clCreateCommandQueueWithProperties(
8765  context(), device(), queue_properties, &error);
8766 
8767  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8768  if (err != NULL) {
8769  *err = error;
8770  }
8771  }
8772 
8776  DeviceCommandQueue(
8777  const Context& context,
8778  const Device& device,
8779  DeviceQueueProperties properties = DeviceQueueProperties::None,
8780  cl_int* err = NULL)
8781  {
8782  cl_int error;
8783 
8784  cl_command_queue_properties mergedProperties =
8785  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8786  cl_queue_properties queue_properties[] = {
8787  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8788  object_ = ::clCreateCommandQueueWithProperties(
8789  context(), device(), queue_properties, &error);
8790 
8791  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8792  if (err != NULL) {
8793  *err = error;
8794  }
8795  }
8796 
8800  DeviceCommandQueue(
8801  const Context& context,
8802  const Device& device,
8803  cl_uint queueSize,
8804  DeviceQueueProperties properties = DeviceQueueProperties::None,
8805  cl_int* err = NULL)
8806  {
8807  cl_int error;
8808 
8809  cl_command_queue_properties mergedProperties =
8810  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8811  cl_queue_properties queue_properties[] = {
8812  CL_QUEUE_PROPERTIES, mergedProperties,
8813  CL_QUEUE_SIZE, queueSize,
8814  0 };
8815  object_ = ::clCreateCommandQueueWithProperties(
8816  context(), device(), queue_properties, &error);
8817 
8818  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8819  if (err != NULL) {
8820  *err = error;
8821  }
8822  }
8823 
8830  explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
8831  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
8832 
8833  DeviceCommandQueue& operator = (const cl_command_queue& rhs)
8834  {
8835  detail::Wrapper<cl_type>::operator=(rhs);
8836  return *this;
8837  }
8838 
8842  DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
8843 
8847  DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
8848  {
8849  detail::Wrapper<cl_type>::operator=(queue);
8850  return *this;
8851  }
8852 
8856  DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
8857 
8861  DeviceCommandQueue& operator = (DeviceCommandQueue &&queue)
8862  {
8863  detail::Wrapper<cl_type>::operator=(std::move(queue));
8864  return *this;
8865  }
8866 
8867  template <typename T>
8868  cl_int getInfo(cl_command_queue_info name, T* param) const
8869  {
8870  return detail::errHandler(
8871  detail::getInfo(
8872  &::clGetCommandQueueInfo, object_, name, param),
8873  __GET_COMMAND_QUEUE_INFO_ERR);
8874  }
8875 
8876  template <cl_command_queue_info name> typename
8877  detail::param_traits<detail::cl_command_queue_info, name>::param_type
8878  getInfo(cl_int* err = NULL) const
8879  {
8880  typename detail::param_traits<
8881  detail::cl_command_queue_info, name>::param_type param;
8882  cl_int result = getInfo(name, &param);
8883  if (err != NULL) {
8884  *err = result;
8885  }
8886  return param;
8887  }
8888 
8895  static DeviceCommandQueue makeDefault(
8896  cl_int *err = nullptr)
8897  {
8898  cl_int error;
8899  cl::Context context = cl::Context::getDefault();
8900  cl::Device device = cl::Device::getDefault();
8901 
8902  cl_command_queue_properties properties =
8903  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8904  cl_queue_properties queue_properties[] = {
8905  CL_QUEUE_PROPERTIES, properties,
8906  0 };
8907  DeviceCommandQueue deviceQueue(
8908  ::clCreateCommandQueueWithProperties(
8909  context(), device(), queue_properties, &error));
8910 
8911  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8912  if (err != NULL) {
8913  *err = error;
8914  }
8915 
8916  return deviceQueue;
8917  }
8918 
8925  static DeviceCommandQueue makeDefault(
8926  const Context &context, const Device &device, cl_int *err = nullptr)
8927  {
8928  cl_int error;
8929 
8930  cl_command_queue_properties properties =
8931  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8932  cl_queue_properties queue_properties[] = {
8933  CL_QUEUE_PROPERTIES, properties,
8934  0 };
8935  DeviceCommandQueue deviceQueue(
8936  ::clCreateCommandQueueWithProperties(
8937  context(), device(), queue_properties, &error));
8938 
8939  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8940  if (err != NULL) {
8941  *err = error;
8942  }
8943 
8944  return deviceQueue;
8945  }
8946 
8953  static DeviceCommandQueue makeDefault(
8954  const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
8955  {
8956  cl_int error;
8957 
8958  cl_command_queue_properties properties =
8959  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8960  cl_queue_properties queue_properties[] = {
8961  CL_QUEUE_PROPERTIES, properties,
8962  CL_QUEUE_SIZE, queueSize,
8963  0 };
8964  DeviceCommandQueue deviceQueue(
8965  ::clCreateCommandQueueWithProperties(
8966  context(), device(), queue_properties, &error));
8967 
8968  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8969  if (err != NULL) {
8970  *err = error;
8971  }
8972 
8973  return deviceQueue;
8974  }
8975 
8976 
8977 
8978 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8979 
8985  static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err = nullptr)
8986  {
8987  cl_int error;
8988  error = clSetDefaultDeviceCommandQueue(context.get(), device.get(), default_queue.get());
8989 
8990  detail::errHandler(error, __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR);
8991  if (err != NULL) {
8992  *err = error;
8993  }
8994  return default_queue;
8995  }
8996 
9000  static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int * err = NULL)
9001  {
9002  return queue.getInfo<CL_QUEUE_DEVICE_DEFAULT>(err);
9003  }
9004 
9005 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
9006 }; // DeviceCommandQueue
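// --- Illustrative usage sketch (added commentary, not part of the original header) ---
// Device-side queues are only meaningful on OpenCL 2.0+ devices that support
// device-side enqueue. A default on-device queue can be created like this:
//
//   cl::Context context = cl::Context::getDefault();
//   cl::Device device = cl::Device::getDefault();
//   cl_int err;
//   cl::DeviceCommandQueue deviceQueue =
//       cl::DeviceCommandQueue::makeDefault(context, device, &err);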
9007 
9008 namespace detail
9009 {
9010  // Specialization for device command queue
9011  template <>
9012  struct KernelArgumentHandler<cl::DeviceCommandQueue, void>
9013  {
9014  static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
9015  static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
9016  };
9017 } // namespace detail
9018 
9019 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9020 
9021 
9022 template< typename IteratorType >
9023 Buffer::Buffer(
9024  const Context &context,
9025  IteratorType startIterator,
9026  IteratorType endIterator,
9027  bool readOnly,
9028  bool useHostPtr,
9029  cl_int* err)
9030 {
9031  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9032  cl_int error;
9033 
9034  cl_mem_flags flags = 0;
9035  if( readOnly ) {
9036  flags |= CL_MEM_READ_ONLY;
9037  }
9038  else {
9039  flags |= CL_MEM_READ_WRITE;
9040  }
9041  if( useHostPtr ) {
9042  flags |= CL_MEM_USE_HOST_PTR;
9043  }
9044 
9045  size_type size = sizeof(DataType)*(endIterator - startIterator);
9046 
9047  if( useHostPtr ) {
9048  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
9049  } else {
9050  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
9051  }
9052 
9053  detail::errHandler(error, __CREATE_BUFFER_ERR);
9054  if (err != NULL) {
9055  *err = error;
9056  }
9057 
9058  if( !useHostPtr ) {
9059  CommandQueue queue(context, 0, &error);
9060  detail::errHandler(error, __CREATE_BUFFER_ERR);
9061  if (err != NULL) {
9062  *err = error;
9063  }
9064 
9065  error = cl::copy(queue, startIterator, endIterator, *this);
9066  detail::errHandler(error, __CREATE_BUFFER_ERR);
9067  if (err != NULL) {
9068  *err = error;
9069  }
9070  }
9071 }
9072 
9073 template< typename IteratorType >
9074 Buffer::Buffer(
9075  const CommandQueue &queue,
9076  IteratorType startIterator,
9077  IteratorType endIterator,
9078  bool readOnly,
9079  bool useHostPtr,
9080  cl_int* err)
9081 {
9082  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9083  cl_int error;
9084 
9085  cl_mem_flags flags = 0;
9086  if (readOnly) {
9087  flags |= CL_MEM_READ_ONLY;
9088  }
9089  else {
9090  flags |= CL_MEM_READ_WRITE;
9091  }
9092  if (useHostPtr) {
9093  flags |= CL_MEM_USE_HOST_PTR;
9094  }
9095 
9096  size_type size = sizeof(DataType)*(endIterator - startIterator);
9097 
9098  Context context = queue.getInfo<CL_QUEUE_CONTEXT>();
9099 
9100  if (useHostPtr) {
9101  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
9102  }
9103  else {
9104  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
9105  }
9106 
9107  detail::errHandler(error, __CREATE_BUFFER_ERR);
9108  if (err != NULL) {
9109  *err = error;
9110  }
9111 
9112  if (!useHostPtr) {
9113  error = cl::copy(queue, startIterator, endIterator, *this);
9114  detail::errHandler(error, __CREATE_BUFFER_ERR);
9115  if (err != NULL) {
9116  *err = error;
9117  }
9118  }
9119 }
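// --- Illustrative usage sketch (added commentary, not part of the original header) ---
// The iterator constructors above size the buffer from the iterator range and, when
// useHostPtr is false, copy the host data into it via cl::copy.
//
//   std::vector<float> host(1024, 1.0f);
//   cl_int err;
//   cl::Buffer buf(
//       cl::Context::getDefault(),
//       host.begin(), host.end(),
//       false /*readOnly*/, false /*useHostPtr*/, &err);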
9120 
9121 inline cl_int enqueueReadBuffer(
9122  const Buffer& buffer,
9123  cl_bool blocking,
9124  size_type offset,
9125  size_type size,
9126  void* ptr,
9127  const vector<Event>* events = NULL,
9128  Event* event = NULL)
9129 {
9130  cl_int error;
9131  CommandQueue queue = CommandQueue::getDefault(&error);
9132 
9133  if (error != CL_SUCCESS) {
9134  return error;
9135  }
9136 
9137  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
9138 }
9139 
9140 inline cl_int enqueueWriteBuffer(
9141  const Buffer& buffer,
9142  cl_bool blocking,
9143  size_type offset,
9144  size_type size,
9145  const void* ptr,
9146  const vector<Event>* events = NULL,
9147  Event* event = NULL)
9148 {
9149  cl_int error;
9150  CommandQueue queue = CommandQueue::getDefault(&error);
9151 
9152  if (error != CL_SUCCESS) {
9153  return error;
9154  }
9155 
9156  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
9157 }
9158 
9159 inline void* enqueueMapBuffer(
9160  const Buffer& buffer,
9161  cl_bool blocking,
9162  cl_map_flags flags,
9163  size_type offset,
9164  size_type size,
9165  const vector<Event>* events = NULL,
9166  Event* event = NULL,
9167  cl_int* err = NULL)
9168 {
9169  cl_int error;
9170  CommandQueue queue = CommandQueue::getDefault(&error);
9171  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9172  if (err != NULL) {
9173  *err = error;
9174  }
9175 
9176  void * result = ::clEnqueueMapBuffer(
9177  queue(), buffer(), blocking, flags, offset, size,
9178  (events != NULL) ? (cl_uint) events->size() : 0,
9179  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
9180  (cl_event*) event,
9181  &error);
9182 
9183  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9184  if (err != NULL) {
9185  *err = error;
9186  }
9187  return result;
9188 }
9189 
9190 
9191 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9192 
9197 template<typename T>
9198 inline cl_int enqueueMapSVM(
9199  T* ptr,
9200  cl_bool blocking,
9201  cl_map_flags flags,
9202  size_type size,
9203  const vector<Event>* events,
9204  Event* event)
9205 {
9206  cl_int error;
9207  CommandQueue queue = CommandQueue::getDefault(&error);
9208  if (error != CL_SUCCESS) {
9209  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9210  }
9211 
9212  return queue.enqueueMapSVM(
9213  ptr, blocking, flags, size, events, event);
9214 }
9215 
9221 template<typename T, class D>
9222 inline cl_int enqueueMapSVM(
9223  cl::pointer<T, D> ptr,
9224  cl_bool blocking,
9225  cl_map_flags flags,
9226  size_type size,
9227  const vector<Event>* events = NULL,
9228  Event* event = NULL)
9229 {
9230  cl_int error;
9231  CommandQueue queue = CommandQueue::getDefault(&error);
9232  if (error != CL_SUCCESS) {
9233  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9234  }
9235 
9236  return queue.enqueueMapSVM(
9237  ptr, blocking, flags, size, events, event);
9238 }
9239 
9245 template<typename T, class Alloc>
9246 inline cl_int enqueueMapSVM(
9247  cl::vector<T, Alloc> container,
9248  cl_bool blocking,
9249  cl_map_flags flags,
9250  const vector<Event>* events = NULL,
9251  Event* event = NULL)
9252 {
9253  cl_int error;
9254  CommandQueue queue = CommandQueue::getDefault(&error);
9255  if (error != CL_SUCCESS) {
9256  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9257  }
9258 
9259  return queue.enqueueMapSVM(
9260  container, blocking, flags, events, event);
9261 }
9262 
9263 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9264 
9265 inline cl_int enqueueUnmapMemObject(
9266  const Memory& memory,
9267  void* mapped_ptr,
9268  const vector<Event>* events = NULL,
9269  Event* event = NULL)
9270 {
9271  cl_int error;
9272  CommandQueue queue = CommandQueue::getDefault(&error);
9273  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9274  if (error != CL_SUCCESS) {
9275  return error;
9276  }
9277 
9278  cl_event tmp;
9279  cl_int err = detail::errHandler(
9280  ::clEnqueueUnmapMemObject(
9281  queue(), memory(), mapped_ptr,
9282  (events != NULL) ? (cl_uint)events->size() : 0,
9283  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
9284  (event != NULL) ? &tmp : NULL),
9285  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9286 
9287  if (event != NULL && err == CL_SUCCESS)
9288  *event = tmp;
9289 
9290  return err;
9291 }
9292 
9293 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9294 
9299 template<typename T>
9300 inline cl_int enqueueUnmapSVM(
9301  T* ptr,
9302  const vector<Event>* events = NULL,
9303  Event* event = NULL)
9304 {
9305  cl_int error;
9306  CommandQueue queue = CommandQueue::getDefault(&error);
9307  if (error != CL_SUCCESS) {
9308  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9309  }
9310 
9311  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9312  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9313 
9314 }
9315 
9321 template<typename T, class D>
9322 inline cl_int enqueueUnmapSVM(
9323  cl::pointer<T, D> &ptr,
9324  const vector<Event>* events = NULL,
9325  Event* event = NULL)
9326 {
9327  cl_int error;
9328  CommandQueue queue = CommandQueue::getDefault(&error);
9329  if (error != CL_SUCCESS) {
9330  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9331  }
9332 
9333  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9334  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9335 }
9336 
9342 template<typename T, class Alloc>
9343 inline cl_int enqueueUnmapSVM(
9344  cl::vector<T, Alloc> &container,
9345  const vector<Event>* events = NULL,
9346  Event* event = NULL)
9347 {
9348  cl_int error;
9349  CommandQueue queue = CommandQueue::getDefault(&error);
9350  if (error != CL_SUCCESS) {
9351  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9352  }
9353 
9354  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
9355  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9356 }
9357 
9358 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9359 
9360 inline cl_int enqueueCopyBuffer(
9361  const Buffer& src,
9362  const Buffer& dst,
9363  size_type src_offset,
9364  size_type dst_offset,
9365  size_type size,
9366  const vector<Event>* events = NULL,
9367  Event* event = NULL)
9368 {
9369  cl_int error;
9370  CommandQueue queue = CommandQueue::getDefault(&error);
9371 
9372  if (error != CL_SUCCESS) {
9373  return error;
9374  }
9375 
9376  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
9377 }
9378 
9384 template< typename IteratorType >
9385 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9386 {
9387  cl_int error;
9388  CommandQueue queue = CommandQueue::getDefault(&error);
9389  if (error != CL_SUCCESS)
9390  return error;
9391 
9392  return cl::copy(queue, startIterator, endIterator, buffer);
9393 }
9394 
9400 template< typename IteratorType >
9401 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9402 {
9403  cl_int error;
9404  CommandQueue queue = CommandQueue::getDefault(&error);
9405  if (error != CL_SUCCESS)
9406  return error;
9407 
9408  return cl::copy(queue, buffer, startIterator, endIterator);
9409 }
9410 
9416 template< typename IteratorType >
9417 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9418 {
9419  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9420  cl_int error;
9421 
9422  size_type length = endIterator-startIterator;
9423  size_type byteLength = length*sizeof(DataType);
9424 
9425  DataType *pointer =
9426  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
9427  // if exceptions enabled, enqueueMapBuffer will throw
9428  if( error != CL_SUCCESS ) {
9429  return error;
9430  }
9431 #if defined(_MSC_VER)
9432  std::copy(
9433  startIterator,
9434  endIterator,
9435  stdext::checked_array_iterator<DataType*>(
9436  pointer, length));
9437 #else
9438  std::copy(startIterator, endIterator, pointer);
9439 #endif
9440  Event endEvent;
9441  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9442  // if exceptions enabled, enqueueUnmapMemObject will throw
9443  if( error != CL_SUCCESS ) {
9444  return error;
9445  }
9446  endEvent.wait();
9447  return CL_SUCCESS;
9448 }
9449 
9455 template< typename IteratorType >
9456 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9457 {
9458  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9459  cl_int error;
9460 
9461  size_type length = endIterator-startIterator;
9462  size_type byteLength = length*sizeof(DataType);
9463 
9464  DataType *pointer =
9465  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
9466  // if exceptions enabled, enqueueMapBuffer will throw
9467  if( error != CL_SUCCESS ) {
9468  return error;
9469  }
9470  std::copy(pointer, pointer + length, startIterator);
9471  Event endEvent;
9472  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9473  // if exceptions enabled, enqueueUnmapMemObject will throw
9474  if( error != CL_SUCCESS ) {
9475  return error;
9476  }
9477  endEvent.wait();
9478  return CL_SUCCESS;
9479 }
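// Illustrative usage sketch, not part of cl2.hpp: the cl::copy helpers above
// map the buffer, run std::copy, and unmap again, so a host container can be
// moved to and from a cl::Buffer in a single call. Assumes this header and
// <vector> are included with exceptions disabled; hostData and deviceBuffer
// are hypothetical names.
inline cl_int copyRoundTrip()
{
    std::vector<int> hostData(1024, 42);
    cl::Buffer deviceBuffer(CL_MEM_READ_WRITE, hostData.size() * sizeof(int));

    cl_int err = cl::copy(hostData.begin(), hostData.end(), deviceBuffer);
    if (err != CL_SUCCESS) {
        return err;
    }
    return cl::copy(deviceBuffer, hostData.begin(), hostData.end());
}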
9480 
9481 
9482 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9483 
9486 template<typename T, class Alloc>
9487 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
9488 {
9489  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
9490 }
9491 
9495 template<typename T, class Alloc>
9496 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
9497 {
9498  return enqueueUnmapSVM(container);
9499 }
9500 
9501 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
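// Illustrative coarse-grained SVM sketch, not part of cl2.hpp, using the
// mapSVM/unmapSVM conveniences above. Requires CL_HPP_TARGET_OPENCL_VERSION
// >= 200 and a device with coarse-grained SVM support; svmData is a
// hypothetical name.
inline cl_int fillSharedVector()
{
    cl::coarse_svm_vector<int> svmData(256);   // backed by clSVMAlloc via cl::SVMAllocator

    cl_int err = cl::mapSVM(svmData);          // blocking map for host read/write access
    if (err != CL_SUCCESS) {
        return err;
    }
    for (std::size_t i = 0; i < svmData.size(); ++i) {
        svmData[i] = static_cast<int>(i);
    }
    return cl::unmapSVM(svmData);              // hand the region back before a kernel uses it
}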
9502 
9503 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
9504 inline cl_int enqueueReadBufferRect(
9505  const Buffer& buffer,
9506  cl_bool blocking,
9507  const array<size_type, 3>& buffer_offset,
9508  const array<size_type, 3>& host_offset,
9509  const array<size_type, 3>& region,
9510  size_type buffer_row_pitch,
9511  size_type buffer_slice_pitch,
9512  size_type host_row_pitch,
9513  size_type host_slice_pitch,
9514  void *ptr,
9515  const vector<Event>* events = NULL,
9516  Event* event = NULL)
9517 {
9518  cl_int error;
9519  CommandQueue queue = CommandQueue::getDefault(&error);
9520 
9521  if (error != CL_SUCCESS) {
9522  return error;
9523  }
9524 
9525  return queue.enqueueReadBufferRect(
9526  buffer,
9527  blocking,
9528  buffer_offset,
9529  host_offset,
9530  region,
9531  buffer_row_pitch,
9532  buffer_slice_pitch,
9533  host_row_pitch,
9534  host_slice_pitch,
9535  ptr,
9536  events,
9537  event);
9538 }
9539 
9540 inline cl_int enqueueWriteBufferRect(
9541  const Buffer& buffer,
9542  cl_bool blocking,
9543  const array<size_type, 3>& buffer_offset,
9544  const array<size_type, 3>& host_offset,
9545  const array<size_type, 3>& region,
9546  size_type buffer_row_pitch,
9547  size_type buffer_slice_pitch,
9548  size_type host_row_pitch,
9549  size_type host_slice_pitch,
9550  const void *ptr,
9551  const vector<Event>* events = NULL,
9552  Event* event = NULL)
9553 {
9554  cl_int error;
9555  CommandQueue queue = CommandQueue::getDefault(&error);
9556 
9557  if (error != CL_SUCCESS) {
9558  return error;
9559  }
9560 
9561  return queue.enqueueWriteBufferRect(
9562  buffer,
9563  blocking,
9564  buffer_offset,
9565  host_offset,
9566  region,
9567  buffer_row_pitch,
9568  buffer_slice_pitch,
9569  host_row_pitch,
9570  host_slice_pitch,
9571  ptr,
9572  events,
9573  event);
9574 }
9575 
9576 inline cl_int enqueueCopyBufferRect(
9577  const Buffer& src,
9578  const Buffer& dst,
9579  const array<size_type, 3>& src_origin,
9580  const array<size_type, 3>& dst_origin,
9581  const array<size_type, 3>& region,
9582  size_type src_row_pitch,
9583  size_type src_slice_pitch,
9584  size_type dst_row_pitch,
9585  size_type dst_slice_pitch,
9586  const vector<Event>* events = NULL,
9587  Event* event = NULL)
9588 {
9589  cl_int error;
9590  CommandQueue queue = CommandQueue::getDefault(&error);
9591 
9592  if (error != CL_SUCCESS) {
9593  return error;
9594  }
9595 
9596  return queue.enqueueCopyBufferRect(
9597  src,
9598  dst,
9599  src_origin,
9600  dst_origin,
9601  region,
9602  src_row_pitch,
9603  src_slice_pitch,
9604  dst_row_pitch,
9605  dst_slice_pitch,
9606  events,
9607  event);
9608 }
9609 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
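// Illustrative sketch, not part of cl2.hpp, for the rectangular helpers above:
// reading an h-row by w-column block of floats out of a buffer holding a
// row-major matrix with W columns into a tightly packed host array. All names
// are assumptions; note that the x components of the origins, the region and
// the row pitches are expressed in bytes.
inline cl_int readBlock(const cl::Buffer &deviceMatrix, float *block,
                        std::size_t x, std::size_t y,
                        std::size_t w, std::size_t h, std::size_t W)
{
    cl::array<cl::size_type, 3> buffer_origin = {x * sizeof(float), y, 0};
    cl::array<cl::size_type, 3> host_origin   = {0, 0, 0};
    cl::array<cl::size_type, 3> region        = {w * sizeof(float), h, 1};

    return cl::enqueueReadBufferRect(
        deviceMatrix, CL_TRUE,
        buffer_origin, host_origin, region,
        W * sizeof(float), 0,    // buffer row pitch in bytes, slice pitch (0 lets the runtime derive it)
        w * sizeof(float), 0,    // host row pitch in bytes, slice pitch
        block);
}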
9610 
9611 inline cl_int enqueueReadImage(
9612  const Image& image,
9613  cl_bool blocking,
9614  const array<size_type, 3>& origin,
9615  const array<size_type, 3>& region,
9616  size_type row_pitch,
9617  size_type slice_pitch,
9618  void* ptr,
9619  const vector<Event>* events = NULL,
9620  Event* event = NULL)
9621 {
9622  cl_int error;
9623  CommandQueue queue = CommandQueue::getDefault(&error);
9624 
9625  if (error != CL_SUCCESS) {
9626  return error;
9627  }
9628 
9629  return queue.enqueueReadImage(
9630  image,
9631  blocking,
9632  origin,
9633  region,
9634  row_pitch,
9635  slice_pitch,
9636  ptr,
9637  events,
9638  event);
9639 }
9640 
9641 inline cl_int enqueueWriteImage(
9642  const Image& image,
9643  cl_bool blocking,
9644  const array<size_type, 3>& origin,
9645  const array<size_type, 3>& region,
9646  size_type row_pitch,
9647  size_type slice_pitch,
9648  const void* ptr,
9649  const vector<Event>* events = NULL,
9650  Event* event = NULL)
9651 {
9652  cl_int error;
9653  CommandQueue queue = CommandQueue::getDefault(&error);
9654 
9655  if (error != CL_SUCCESS) {
9656  return error;
9657  }
9658 
9659  return queue.enqueueWriteImage(
9660  image,
9661  blocking,
9662  origin,
9663  region,
9664  row_pitch,
9665  slice_pitch,
9666  ptr,
9667  events,
9668  event);
9669 }
9670 
9671 inline cl_int enqueueCopyImage(
9672  const Image& src,
9673  const Image& dst,
9674  const array<size_type, 3>& src_origin,
9675  const array<size_type, 3>& dst_origin,
9676  const array<size_type, 3>& region,
9677  const vector<Event>* events = NULL,
9678  Event* event = NULL)
9679 {
9680  cl_int error;
9681  CommandQueue queue = CommandQueue::getDefault(&error);
9682 
9683  if (error != CL_SUCCESS) {
9684  return error;
9685  }
9686 
9687  return queue.enqueueCopyImage(
9688  src,
9689  dst,
9690  src_origin,
9691  dst_origin,
9692  region,
9693  events,
9694  event);
9695 }
9696 
9697 inline cl_int enqueueCopyImageToBuffer(
9698  const Image& src,
9699  const Buffer& dst,
9700  const array<size_type, 3>& src_origin,
9701  const array<size_type, 3>& region,
9702  size_type dst_offset,
9703  const vector<Event>* events = NULL,
9704  Event* event = NULL)
9705 {
9706  cl_int error;
9707  CommandQueue queue = CommandQueue::getDefault(&error);
9708 
9709  if (error != CL_SUCCESS) {
9710  return error;
9711  }
9712 
9713  return queue.enqueueCopyImageToBuffer(
9714  src,
9715  dst,
9716  src_origin,
9717  region,
9718  dst_offset,
9719  events,
9720  event);
9721 }
9722 
9723 inline cl_int enqueueCopyBufferToImage(
9724  const Buffer& src,
9725  const Image& dst,
9726  size_type src_offset,
9727  const array<size_type, 3>& dst_origin,
9728  const array<size_type, 3>& region,
9729  const vector<Event>* events = NULL,
9730  Event* event = NULL)
9731 {
9732  cl_int error;
9733  CommandQueue queue = CommandQueue::getDefault(&error);
9734 
9735  if (error != CL_SUCCESS) {
9736  return error;
9737  }
9738 
9739  return queue.enqueueCopyBufferToImage(
9740  src,
9741  dst,
9742  src_offset,
9743  dst_origin,
9744  region,
9745  events,
9746  event);
9747 }
9748 
9749 
9750 inline cl_int flush(void)
9751 {
9752  cl_int error;
9753  CommandQueue queue = CommandQueue::getDefault(&error);
9754 
9755  if (error != CL_SUCCESS) {
9756  return error;
9757  }
9758 
9759  return queue.flush();
9760 }
9761 
9762 inline cl_int finish(void)
9763 {
9764  cl_int error;
9765  CommandQueue queue = CommandQueue::getDefault(&error);
9766 
9767  if (error != CL_SUCCESS) {
9768  return error;
9769  }
9770 
9771 
9772  return queue.finish();
9773 }
9774 
9775 class EnqueueArgs
9776 {
9777 private:
9778  CommandQueue queue_;
9779  const NDRange offset_;
9780  const NDRange global_;
9781  const NDRange local_;
9782  vector<Event> events_;
9783 
9784  template<typename... Ts>
9785  friend class KernelFunctor;
9786 
9787 public:
9788  EnqueueArgs(NDRange global) :
9789  queue_(CommandQueue::getDefault()),
9790  offset_(NullRange),
9791  global_(global),
9792  local_(NullRange)
9793  {
9794 
9795  }
9796 
9797  EnqueueArgs(NDRange global, NDRange local) :
9798  queue_(CommandQueue::getDefault()),
9799  offset_(NullRange),
9800  global_(global),
9801  local_(local)
9802  {
9803 
9804  }
9805 
9806  EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
9807  queue_(CommandQueue::getDefault()),
9808  offset_(offset),
9809  global_(global),
9810  local_(local)
9811  {
9812 
9813  }
9814 
9815  EnqueueArgs(Event e, NDRange global) :
9816  queue_(CommandQueue::getDefault()),
9817  offset_(NullRange),
9818  global_(global),
9819  local_(NullRange)
9820  {
9821  events_.push_back(e);
9822  }
9823 
9824  EnqueueArgs(Event e, NDRange global, NDRange local) :
9825  queue_(CommandQueue::getDefault()),
9826  offset_(NullRange),
9827  global_(global),
9828  local_(local)
9829  {
9830  events_.push_back(e);
9831  }
9832 
9833  EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
9834  queue_(CommandQueue::getDefault()),
9835  offset_(offset),
9836  global_(global),
9837  local_(local)
9838  {
9839  events_.push_back(e);
9840  }
9841 
9842  EnqueueArgs(const vector<Event> &events, NDRange global) :
9843  queue_(CommandQueue::getDefault()),
9844  offset_(NullRange),
9845  global_(global),
9846  local_(NullRange),
9847  events_(events)
9848  {
9849 
9850  }
9851 
9852  EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
9853  queue_(CommandQueue::getDefault()),
9854  offset_(NullRange),
9855  global_(global),
9856  local_(local),
9857  events_(events)
9858  {
9859 
9860  }
9861 
9862  EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9863  queue_(CommandQueue::getDefault()),
9864  offset_(offset),
9865  global_(global),
9866  local_(local),
9867  events_(events)
9868  {
9869 
9870  }
9871 
9872  EnqueueArgs(CommandQueue &queue, NDRange global) :
9873  queue_(queue),
9874  offset_(NullRange),
9875  global_(global),
9876  local_(NullRange)
9877  {
9878 
9879  }
9880 
9881  EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
9882  queue_(queue),
9883  offset_(NullRange),
9884  global_(global),
9885  local_(local)
9886  {
9887 
9888  }
9889 
9890  EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
9891  queue_(queue),
9892  offset_(offset),
9893  global_(global),
9894  local_(local)
9895  {
9896 
9897  }
9898 
9899  EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
9900  queue_(queue),
9901  offset_(NullRange),
9902  global_(global),
9903  local_(NullRange)
9904  {
9905  events_.push_back(e);
9906  }
9907 
9908  EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
9909  queue_(queue),
9910  offset_(NullRange),
9911  global_(global),
9912  local_(local)
9913  {
9914  events_.push_back(e);
9915  }
9916 
9917  EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
9918  queue_(queue),
9919  offset_(offset),
9920  global_(global),
9921  local_(local)
9922  {
9923  events_.push_back(e);
9924  }
9925 
9926  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
9927  queue_(queue),
9928  offset_(NullRange),
9929  global_(global),
9930  local_(NullRange),
9931  events_(events)
9932  {
9933 
9934  }
9935 
9936  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
9937  queue_(queue),
9938  offset_(NullRange),
9939  global_(global),
9940  local_(local),
9941  events_(events)
9942  {
9943 
9944  }
9945 
9946  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9947  queue_(queue),
9948  offset_(offset),
9949  global_(global),
9950  local_(local),
9951  events_(events)
9952  {
9953 
9954  }
9955 };
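// Illustrative sketch, not part of cl2.hpp: the main ways to build an
// EnqueueArgs launch configuration. Constructors without an explicit
// CommandQueue use CommandQueue::getDefault(); myQueue and depEvent are
// hypothetical names supplied by the caller.
inline void showEnqueueArgsForms(cl::CommandQueue &myQueue, cl::Event depEvent)
{
    cl::EnqueueArgs simple(cl::NDRange(1024));                   // global size only, default queue
    cl::EnqueueArgs shaped(cl::NDRange(1024), cl::NDRange(64));  // global plus work-group size
    cl::EnqueueArgs shifted(cl::NDRange(512), cl::NDRange(1024), cl::NDRange(64)); // offset, global, local
    cl::EnqueueArgs queued(myQueue, depEvent, cl::NDRange(1024)); // explicit queue and one event dependency
    (void)simple; (void)shaped; (void)shifted; (void)queued;
}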
9956 
9957 
9958 //----------------------------------------------------------------------------------------------
9959 
9960 
9965 template<typename... Ts>
9966 class KernelFunctor
9967 {
9968 private:
9969  Kernel kernel_;
9970 
9971  template<int index, typename T0, typename... T1s>
9972  void setArgs(T0&& t0, T1s&&... t1s)
9973  {
9974  kernel_.setArg(index, t0);
9975  setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
9976  }
9977 
9978  template<int index, typename T0>
9979  void setArgs(T0&& t0)
9980  {
9981  kernel_.setArg(index, t0);
9982  }
9983 
9984  template<int index>
9985  void setArgs()
9986  {
9987  }
9988 
9989 
9990 public:
9991  KernelFunctor(Kernel kernel) : kernel_(kernel)
9992  {}
9993 
9994  KernelFunctor(
9995  const Program& program,
9996  const string name,
9997  cl_int * err = NULL) :
9998  kernel_(program, name.c_str(), err)
9999  {}
10000 
10002  typedef Event result_type;
10003 
10009  Event operator()(
10010  const EnqueueArgs& args,
10011  Ts... ts)
10012  {
10013  Event event;
10014  setArgs<0>(std::forward<Ts>(ts)...);
10015 
10016  args.queue_.enqueueNDRangeKernel(
10017  kernel_,
10018  args.offset_,
10019  args.global_,
10020  args.local_,
10021  &args.events_,
10022  &event);
10023 
10024  return event;
10025  }
10026 
10033  Event operator()(
10034  const EnqueueArgs& args,
10035  Ts... ts,
10036  cl_int &error)
10037  {
10038  Event event;
10039  setArgs<0>(std::forward<Ts>(ts)...);
10040 
10041  error = args.queue_.enqueueNDRangeKernel(
10042  kernel_,
10043  args.offset_,
10044  args.global_,
10045  args.local_,
10046  &args.events_,
10047  &event);
10048 
10049  return event;
10050  }
10051 
10052 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
10053  cl_int setSVMPointers(const vector<void*> &pointerList)
10054  {
10055  return kernel_.setSVMPointers(pointerList);
10056  }
10057 
10058  template<typename T0, typename... T1s>
10059  cl_int setSVMPointers(const T0 &t0, T1s &... ts)
10060  {
10061  return kernel_.setSVMPointers(t0, ts...);
10062  }
10063 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
10064 
10065  Kernel getKernel()
10066  {
10067  return kernel_;
10068  }
10069 };
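// Illustrative sketch, not part of cl2.hpp: binding a kernel from an already
// built program and launching it through KernelFunctor. The kernel name
// "vadd", its (buffer, buffer, int) signature and the argument names are
// assumptions.
inline cl::Event launchVadd(const cl::Program &program,
                            const cl::Buffer &a, const cl::Buffer &b, int n)
{
    cl::KernelFunctor<cl::Buffer, cl::Buffer, int> vadd(program, "vadd");

    // The operator() overload with a trailing cl_int& reports the enqueue status.
    cl_int err = CL_SUCCESS;
    cl::Event done = vadd(cl::EnqueueArgs(cl::NDRange(static_cast<cl::size_type>(n))),
                          a, b, n, err);
    (void)err;
    return done;
}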
10070 
10071 namespace compatibility {
10076  template<typename... Ts>
10077  struct make_kernel
10078  {
10079  typedef KernelFunctor<Ts...> FunctorType;
10080 
10081  FunctorType functor_;
10082 
10083  make_kernel(
10084  const Program& program,
10085  const string name,
10086  cl_int * err = NULL) :
10087  functor_(FunctorType(program, name, err))
10088  {}
10089 
10090  make_kernel(
10091  const Kernel kernel) :
10092  functor_(FunctorType(kernel))
10093  {}
10094 
10096  typedef Event result_type;
10097 
10099  typedef Event type_(
10100  const EnqueueArgs&,
10101  Ts...);
10102 
10103  Event operator()(
10104  const EnqueueArgs& enqueueArgs,
10105  Ts... args)
10106  {
10107  return functor_(
10108  enqueueArgs, args...);
10109  }
10110  };
10111 } // namespace compatibility
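// Illustrative sketch, not part of cl2.hpp: cl::compatibility::make_kernel
// exposes the cl.hpp-style interface on top of KernelFunctor, so older code
// can keep its make_kernel spelling. Names and the kernel signature are the
// same assumptions as in the KernelFunctor sketch above.
inline cl::Event launchVaddCompat(const cl::Program &program,
                                  const cl::Buffer &a, const cl::Buffer &b, int n)
{
    cl::compatibility::make_kernel<cl::Buffer, cl::Buffer, int> vadd(program, "vadd");
    return vadd(cl::EnqueueArgs(cl::NDRange(static_cast<cl::size_type>(n))), a, b, n);
}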
10112 
10113 
10114 //----------------------------------------------------------------------------------------------------------------------
10115 
10116 #undef CL_HPP_ERR_STR_
10117 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
10118 #undef __GET_DEVICE_INFO_ERR
10119 #undef __GET_PLATFORM_INFO_ERR
10120 #undef __GET_DEVICE_IDS_ERR
10121 #undef __GET_PLATFORM_IDS_ERR
10122 #undef __GET_CONTEXT_INFO_ERR
10123 #undef __GET_EVENT_INFO_ERR
10124 #undef __GET_EVENT_PROFILE_INFO_ERR
10125 #undef __GET_MEM_OBJECT_INFO_ERR
10126 #undef __GET_IMAGE_INFO_ERR
10127 #undef __GET_SAMPLER_INFO_ERR
10128 #undef __GET_KERNEL_INFO_ERR
10129 #undef __GET_KERNEL_ARG_INFO_ERR
10130 #undef __GET_KERNEL_SUB_GROUP_INFO_ERR
10131 #undef __GET_KERNEL_WORK_GROUP_INFO_ERR
10132 #undef __GET_PROGRAM_INFO_ERR
10133 #undef __GET_PROGRAM_BUILD_INFO_ERR
10134 #undef __GET_COMMAND_QUEUE_INFO_ERR
10135 #undef __CREATE_CONTEXT_ERR
10136 #undef __CREATE_CONTEXT_FROM_TYPE_ERR
10137 #undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
10138 #undef __CREATE_BUFFER_ERR
10139 #undef __COPY_ERR
10140 #undef __CREATE_SUBBUFFER_ERR
10141 #undef __CREATE_GL_BUFFER_ERR
10142 #undef __CREATE_GL_RENDER_BUFFER_ERR
10143 #undef __GET_GL_OBJECT_INFO_ERR
10144 #undef __CREATE_IMAGE_ERR
10145 #undef __CREATE_GL_TEXTURE_ERR
10146 #undef __IMAGE_DIMENSION_ERR
10147 #undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
10148 #undef __CREATE_USER_EVENT_ERR
10149 #undef __SET_USER_EVENT_STATUS_ERR
10150 #undef __SET_EVENT_CALLBACK_ERR
10151 #undef __WAIT_FOR_EVENTS_ERR
10152 #undef __CREATE_KERNEL_ERR
10153 #undef __SET_KERNEL_ARGS_ERR
10154 #undef __CREATE_PROGRAM_WITH_SOURCE_ERR
10155 #undef __CREATE_PROGRAM_WITH_IL_ERR
10156 #undef __CREATE_PROGRAM_WITH_BINARY_ERR
10157 #undef __CREATE_PROGRAM_WITH_IL_ERR
10158 #undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
10159 #undef __BUILD_PROGRAM_ERR
10160 #undef __COMPILE_PROGRAM_ERR
10161 #undef __LINK_PROGRAM_ERR
10162 #undef __CREATE_KERNELS_IN_PROGRAM_ERR
10163 #undef __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR
10164 #undef __CREATE_SAMPLER_WITH_PROPERTIES_ERR
10165 #undef __SET_COMMAND_QUEUE_PROPERTY_ERR
10166 #undef __ENQUEUE_READ_BUFFER_ERR
10167 #undef __ENQUEUE_READ_BUFFER_RECT_ERR
10168 #undef __ENQUEUE_WRITE_BUFFER_ERR
10169 #undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
10170 #undef __ENQEUE_COPY_BUFFER_ERR
10171 #undef __ENQEUE_COPY_BUFFER_RECT_ERR
10172 #undef __ENQUEUE_FILL_BUFFER_ERR
10173 #undef __ENQUEUE_READ_IMAGE_ERR
10174 #undef __ENQUEUE_WRITE_IMAGE_ERR
10175 #undef __ENQUEUE_COPY_IMAGE_ERR
10176 #undef __ENQUEUE_FILL_IMAGE_ERR
10177 #undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
10178 #undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
10179 #undef __ENQUEUE_MAP_BUFFER_ERR
10180 #undef __ENQUEUE_MAP_IMAGE_ERR
10181 #undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
10182 #undef __ENQUEUE_NDRANGE_KERNEL_ERR
10183 #undef __ENQUEUE_NATIVE_KERNEL
10184 #undef __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR
10185 #undef __ENQUEUE_MIGRATE_SVM_ERR
10186 #undef __ENQUEUE_ACQUIRE_GL_ERR
10187 #undef __ENQUEUE_RELEASE_GL_ERR
10188 #undef __CREATE_PIPE_ERR
10189 #undef __GET_PIPE_INFO_ERR
10190 #undef __RETAIN_ERR
10191 #undef __RELEASE_ERR
10192 #undef __FLUSH_ERR
10193 #undef __FINISH_ERR
10194 #undef __VECTOR_CAPACITY_ERR
10195 #undef __CREATE_SUB_DEVICES_ERR
10196 #undef __CREATE_SUB_DEVICES_ERR
10197 #undef __ENQUEUE_MARKER_ERR
10198 #undef __ENQUEUE_WAIT_FOR_EVENTS_ERR
10199 #undef __ENQUEUE_BARRIER_ERR
10200 #undef __UNLOAD_COMPILER_ERR
10201 #undef __CREATE_GL_TEXTURE_2D_ERR
10202 #undef __CREATE_GL_TEXTURE_3D_ERR
10203 #undef __CREATE_IMAGE2D_ERR
10204 #undef __CREATE_IMAGE3D_ERR
10205 #undef __CREATE_COMMAND_QUEUE_ERR
10206 #undef __ENQUEUE_TASK_ERR
10207 #undef __CREATE_SAMPLER_ERR
10208 #undef __ENQUEUE_MARKER_WAIT_LIST_ERR
10209 #undef __ENQUEUE_BARRIER_WAIT_LIST_ERR
10210 #undef __CLONE_KERNEL_ERR
10211 #undef __GET_HOST_TIMER_ERR
10212 #undef __GET_DEVICE_AND_HOST_TIMER_ERR
10213 
10214 #endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS
10215 
10216 // Extensions
10217 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_
10218 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_
10219 
10220 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
10221 #undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
10222 #endif // CL_HPP_USE_CL_DEVICE_FISSION
10223 
10224 #undef CL_HPP_NOEXCEPT_
10225 #undef CL_HPP_DEFINE_STATIC_MEMBER_
10226 
10227 } // namespace cl
10228 
10229 #endif // CL_HPP_
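Putting the pieces in this listing together, a complete host program can stay quite short. The sketch below is illustrative rather than part of cl2.hpp: the kernel source, buffer names and work size are assumptions, and error-code checking is omitted for brevity.

#define CL_HPP_MINIMUM_OPENCL_VERSION 120
#define CL_HPP_TARGET_OPENCL_VERSION 120
#include <CL/cl2.hpp>

#include <iostream>
#include <vector>

static const char kernelSource[] =
    "kernel void vadd(global const int* a, global const int* b, global int* c) {\n"
    "    size_t i = get_global_id(0);\n"
    "    c[i] = a[i] + b[i];\n"
    "}\n";

int main()
{
    const std::size_t n = 1024;
    std::vector<int> a(n, 1), b(n, 2), c(n, 0);

    // Buffers built from host iterators live in the default context and are
    // filled through the default command queue.
    cl::Buffer bufA(a.begin(), a.end(), /*readOnly*/ true);
    cl::Buffer bufB(b.begin(), b.end(), /*readOnly*/ true);
    cl::Buffer bufC(CL_MEM_WRITE_ONLY, n * sizeof(int));

    cl::Program program(kernelSource, /*build*/ true);
    cl::KernelFunctor<cl::Buffer, cl::Buffer, cl::Buffer> vadd(program, "vadd");

    // Launch over n work-items on the default queue, then copy the result back.
    vadd(cl::EnqueueArgs(cl::NDRange(n)), bufA, bufB, bufC);
    cl::copy(bufC, c.begin(), c.end());

    std::cout << "c[0] = " << c[0] << std::endl;
    return 0;
}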
cl::compatibility::make_kernel::result_type
Event result_type
Return type of the functor.
Definition: cl2.hpp:10096
cl::SVMTraitCoarse
Definition: cl2.hpp:3499
cl::Image2D::Image2D
Image2D(const Context &context, ImageFormat format, const Buffer &sourceBuffer, size_type width, size_type height, size_type row_pitch=0, cl_int *err=nullptr)
Constructs a 2D Image from a buffer.
Definition: cl2.hpp:4764
cl::Image3DGL::Image3DGL
Image3DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5294
cl::copy
cl_int copy(IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer)
Definition: cl2.hpp:9385
cl::Image1DArray::Image1DArray
Image1DArray(const Image1DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4651
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8392
cl::Device::getDeviceAndHostTimer
std::pair< cl_ulong, cl_ulong > getDeviceAndHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2241
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:8830
cl::Pipe::getInfo
cl_int getInfo(cl_pipe_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:5536
cl::CommandQueue::CommandQueue
CommandQueue(const CommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7462
cl::Device::getInfo
detail::param_traits< detail::cl_device_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetDeviceInfo() that returns by value.
Definition: cl2.hpp:2198
cl::Event::getProfilingInfo
detail::param_traits< detail::cl_profiling_info, name >::param_type getProfilingInfo(cl_int *err=NULL) const
Wrapper for clGetEventProfilingInfo() that returns by value.
Definition: cl2.hpp:3207
cl::SVMAllocator::rebind
Definition: cl2.hpp:3568
cl::Program
Program interface that implements cl_program.
Definition: cl2.hpp:6167
cl::Pipe
Class interface for Pipe Memory Objects.
Definition: cl2.hpp:5428
cl::Sampler
Class interface for cl_sampler.
Definition: cl2.hpp:5569
cl::NDRange::size
size_type size() const
Returns the size of the object in bytes based on the.
Definition: cl2.hpp:5756
cl::BufferGL::BufferGL
BufferGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferGL in a specified context, from a given GL buffer.
Definition: cl2.hpp:4145
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferRenderGL in a specified context, from a given GL Renderbuffer.
Definition: cl2.hpp:4242
cl::Image2D::Image2D
Image2D(Image2D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4907
cl::Event::setCallback
cl_int setCallback(cl_int type, void(CL_CALLBACK *pfn_notify)(cl_event, cl_int, void *), void *user_data=NULL)
Registers a user callback function for a specific command execution status.
Definition: cl2.hpp:3234
cl::Image1D::Image1D
Image1D(Image1D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4492
cl::Platform::unloadCompiler
cl_int unloadCompiler()
Wrapper for clUnloadCompiler().
Definition: cl2.hpp:2698
cl::Kernel::setArg
std::enable_if< std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T argPtr)
setArg overload taking a pointer type
Definition: cl2.hpp:6018
cl::Context::getDefault
static Context getDefault(cl_int *err=NULL)
Returns a singleton context including all devices of CL_DEVICE_TYPE_DEFAULT.
Definition: cl2.hpp:2984
cl::Program::Program
Program(const cl_program &program, bool retainObject=false)
Constructor from cl_program - takes ownership.
Definition: cl2.hpp:6554
cl::detail::ReferenceHandler< cl_device_id >::retain
static cl_int retain(cl_device_id device)
Definition: cl2.hpp:1595
cl::Memory::Memory
Memory(const cl_mem &memory, bool retainObject)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3344
cl::Image1D::Image1D
Image1D(const Image1D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4478
cl::Image2DGL::Image2DGL
Image2DGL(Image2DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5003
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_uint4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7911
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_int4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7877
cl::Buffer::Buffer
Buffer(const Buffer &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3965
cl::Platform::Platform
Platform()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2426
cl::DeviceCommandQueue::getDefault
static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int *err=NULL)
Definition: cl2.hpp:9000
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8450
cl::Image2D::Image2D
Image2D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4868
cl::SVMTraitAtomic
Definition: cl2.hpp:3520
cl::Image3D::Image3D
Image3D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5193
cl::Context::Context
Context(Context &&ctx) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2968
cl::UserEvent
Class interface for user events (a subset of cl_event's).
Definition: cl2.hpp:3269
cl::Platform::get
static cl_int get(vector< Platform > *platforms)
Gets a list of available platforms.
Definition: cl2.hpp:2630
cl::Sampler::Sampler
Sampler()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5572
cl::Event::Event
Event(const cl_event &event, bool retainObject=false)
Constructor from cl_event - takes ownership.
Definition: cl2.hpp:3158
cl::Image3D::Image3D
Image3D(Image3D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5232
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const Context &context, const Device &device, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8776
cl::fine_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitFine<> >> fine_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers.
Definition: cl2.hpp:3800
cl::Image3DGL::operator=
Image3DGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5301
cl::Buffer
Class interface for Buffer Memory Objects.
Definition: cl2.hpp:3818
cl::Pipe::Pipe
Pipe(cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a the default context.
Definition: cl2.hpp:5465
cl::CommandQueue::CommandQueue
CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:7450
cl::Pipe::getInfo
detail::param_traits< detail::cl_pipe_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:5546
cl::Image3DGL::Image3DGL
Image3DGL(const Image3DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5310
cl::Program::Program
Program(const Context &context, const vector< Device > &devices, const string &kernelNames, cl_int *err=NULL)
Definition: cl2.hpp:6516
cl::Image3D::operator=
Image3D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5209
cl::Image2D::operator=
Image2D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4884
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const DeviceCommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8842
cl::Kernel::clone
Kernel clone()
Definition: cl2.hpp:6152
cl::Image1DBuffer
Image interface for 1D buffer images.
Definition: cl2.hpp:4509
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const BufferRenderGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4287
cl::NDRange::dimensions
size_type dimensions() const
Queries the number of dimensions in the range.
Definition: cl2.hpp:5749
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(cl_int *err=nullptr)
Definition: cl2.hpp:8895
cl::Program::Program
Program(const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6257
cl::Device::Device
Device(Device &&dev) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2175
cl::Sampler::Sampler
Sampler(const Sampler &sam)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5642
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8414
cl::enqueueUnmapSVM
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9300
cl::Kernel::setSVMPointers
cl_int setSVMPointers(const vector< void * > &pointerList)
Definition: cl2.hpp:6053
cl::LocalSpaceArg
Local address wrapper for use with Kernel::setArg.
Definition: cl2.hpp:5777
cl::Memory::Memory
Memory(const Memory &mem)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3361
cl::Context
Class interface for cl_context.
Definition: cl2.hpp:2737
cl::mapSVM
cl_int mapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9487
cl::Image::Image
Image(const Image &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4358
cl::Program::Program
Program(Program &&program) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6580
cl::SVMTraitFine
Definition: cl2.hpp:3509
cl::Buffer::Buffer
Buffer(Buffer &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3979
cl::Pipe::operator=
Pipe & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5500
cl::Buffer::Buffer
Buffer(const Context &context, cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in a specified context.
Definition: cl2.hpp:3828
cl::ImageFormat
Adds constructors and member functions for cl_image_format.
Definition: cl2.hpp:2044
cl::Image::Image
Image(Image &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4372
cl::Program::Program
Program(const Context &context, const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6384
cl::Context::getInfo
detail::param_traits< detail::cl_context_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetContextInfo() that returns by value.
Definition: cl2.hpp:3042
cl::Buffer::Buffer
Buffer()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3940
cl::Image1D::Image1D
Image1D(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4462
cl::CommandQueue::enqueueBarrierWithWaitList
cl_int enqueueBarrierWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8283
cl::Image2DGL::Image2DGL
Image2DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image2DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:4939
cl::Memory
Class interface for cl_mem.
Definition: cl2.hpp:3328
cl::Program::setSpecializationConstant
cl_int setSpecializationConstant(cl_uint index, size_type size, const void *value)
Sets a SPIR-V specialization constant.
Definition: cl2.hpp:6815
cl::Sampler::Sampler
Sampler(const cl_sampler &sampler, bool retainObject=false)
Constructor from cl_sampler - takes ownership.
Definition: cl2.hpp:5625
cl::Image2DGL::Image2DGL
Image2DGL(const Image2DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4989
cl::ImageGL::ImageGL
ImageGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5379
cl::Device::Device
Device(const Device &dev)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2161
cl::Image2DArray::Image2DArray
Image2DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5071
cl::Event::waitForEvents
static cl_int waitForEvents(const vector< Event > &events)
Blocks the calling thread until every event specified is complete.
Definition: cl2.hpp:3254
cl::NDRange::NDRange
NDRange(size_type size0, size_type size1)
Constructs two-dimensional range.
Definition: cl2.hpp:5723
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7186
cl::NDRange::NDRange
NDRange()
Default constructor - resulting range has zero dimensions.
Definition: cl2.hpp:5705
cl::CommandQueue::enqueueFillBuffer
cl_int enqueueFillBuffer(const Buffer &buffer, PatternType pattern, size_type offset, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7711
cl::Buffer::Buffer
Buffer(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3949
cl::DeviceCommandQueue
DeviceCommandQueue interface for device cl_command_queues.
Definition: cl2.hpp:8742
cl::Image2D::Image2D
Image2D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type row_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 2D Image in a specified context.
Definition: cl2.hpp:4693
cl::SVMTraitReadOnly
Definition: cl2.hpp:3477
cl::Image2DArray::Image2DArray
Image2DArray(const Image2DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5082
cl::Pipe::Pipe
Pipe()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5484
cl::NDRange::NDRange
NDRange(size_type size0)
Constructs one-dimensional range.
Definition: cl2.hpp:5714
cl::Buffer::Buffer
Buffer(cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in the default context.
Definition: cl2.hpp:3853
cl::Image2DGL::Image2DGL
Image2DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4973
cl::UnloadCompiler
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:2721
cl::Platform::getInfo
detail::param_traits< detail::cl_platform_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetPlatformInfo() that returns by value.
Definition: cl2.hpp:2485
cl::Sampler::getInfo
cl_int getInfo(cl_sampler_info name, T *param) const
Wrapper for clGetSamplerInfo().
Definition: cl2.hpp:5669
cl::Kernel::Kernel
Kernel()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5840
cl::Memory::operator=
Memory & operator=(const cl_mem &rhs)
Assignment operator from cl_mem - takes ownership.
Definition: cl2.hpp:3352
cl::Memory::setDestructorCallback
cl_int setDestructorCallback(void(CL_CALLBACK *pfn_notify)(cl_mem, void *), void *user_data=NULL)
Registers a callback function to be called when the memory object is no longer needed.
Definition: cl2.hpp:3424
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8429
cl::Device::operator=
Device & operator=(const cl_device_id &rhs)
Assignment operator from cl_device_id.
Definition: cl2.hpp:2152
cl::Image3DGL::Image3DGL
Image3DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image3DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:5261
cl::Event::getProfilingInfo
cl_int getProfilingInfo(cl_profiling_info name, T *param) const
Wrapper for clGetEventProfilingInfo().
Definition: cl2.hpp:3197
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4262
cl::Kernel::setArg
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T &value)
setArg overload taking a POD type
Definition: cl2.hpp:6030
cl::Sampler::operator=
Sampler & operator=(const cl_sampler &rhs)
Assignment operator from cl_sampler - takes ownership.
Definition: cl2.hpp:5633
cl::Context::Context
Context(cl_device_type type, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including all or a subset of devices of a specified type.
Definition: cl2.hpp:2868
cl::Sampler::Sampler
Sampler(const Context &context, cl_bool normalized_coords, cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, cl_int *err=NULL)
Constructs a Sampler in a specified context.
Definition: cl2.hpp:5578
cl::SVMAllocator
Definition: cl2.hpp:3553
cl::Program::setReleaseCallback
cl_int setReleaseCallback(void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), void *user_data=NULL)
Registers a callback function to be called when destructors for program scope global variables are co...
Definition: cl2.hpp:6782
cl::ImageFormat::ImageFormat
ImageFormat(cl_channel_order order, cl_channel_type type)
Initializing constructor.
Definition: cl2.hpp:2049
cl::Image::getImageInfo
cl_int getImageInfo(cl_image_info name, T *param) const
Wrapper for clGetImageInfo().
Definition: cl2.hpp:4387
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7315
cl::Platform::get
static Platform get(cl_int *errResult=NULL)
Gets the first available platform, returning it by value.
Definition: cl2.hpp:2684
cl::Kernel
Class interface for cl_kernel.
Definition: cl2.hpp:5835
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(cl::pointer< T, D > &ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8093
cl::SVMAllocator::operator==
bool operator==(SVMAllocator const &rhs)
Definition: cl2.hpp:3687
cl::Program::Program
Program(const Context &context, const vector< Device > &devices, const Binaries &binaries, vector< cl_int > *binaryStatus=NULL, cl_int *err=NULL)
Definition: cl2.hpp:6452
cl::CommandQueue::enqueueMarker
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueMarker(Event *event=NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8549
cl::BufferRenderGL::getObjectInfo
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4313
cl::Kernel::enableFineGrainedSystemSVM
cl_int enableFineGrainedSystemSVM(bool svmEnabled)
Enable fine-grained system SVM.
Definition: cl2.hpp:6089
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4550
cl::Pipe::Pipe
Pipe(const Context &context, cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a specified context.
Definition: cl2.hpp:5440
cl::Context::operator=
Context & operator=(const Context &ctx)
Copy assignment to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2959
cl::ImageFormat::ImageFormat
ImageFormat()
Default constructor - performs no initialization.
Definition: cl2.hpp:2046
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8170
cl::Kernel::setArg
cl_int setArg(cl_uint index, const cl::pointer< T, D > &argPtr)
setArg overload taking a shared_ptr type
Definition: cl2.hpp:5997
cl::Platform::operator=
Platform & operator=(const cl_platform_id &rhs)
Assignment operator from cl_platform_id.
Definition: cl2.hpp:2442
cl::BufferGL::getObjectInfo
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4216
cl::Event::wait
cl_int wait() const
Blocks the calling thread until this event completes.
Definition: cl2.hpp:3222
cl::Sampler::Sampler
Sampler(Sampler &&sam) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5656
cl::Context::getSupportedImageFormats
cl_int getSupportedImageFormats(cl_mem_flags flags, cl_mem_object_type type, vector< ImageFormat > *formats) const
Gets a list of supported image formats.
Definition: cl2.hpp:3057
cl::BufferGL::BufferGL
BufferGL(const BufferGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4190
cl::Kernel::setArg
cl_int setArg(cl_uint index, const cl::vector< T, Alloc > &argPtr)
setArg overload taking a vector type.
Definition: cl2.hpp:6007
cl::Memory::Memory
Memory()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3331
cl::compatibility::make_kernel::type_
Event type_(const EnqueueArgs &, Ts...)
Function signature of kernel functor with no event dependency.
Definition: cl2.hpp:10099
cl::Image
C++ base class for Image Memory objects.
Definition: cl2.hpp:4330
cl::atomic_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitAtomic<> >> atomic_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers that support platform atomics.
Definition: cl2.hpp:3806
cl::Context::Context
Context(const vector< Device > &devices, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including a list of specified devices.
Definition: cl2.hpp:2807
cl::Pipe::Pipe
Pipe(Pipe &&pipe) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5523
cl
The OpenCL C++ bindings are defined within this namespace.
Definition: cl2.hpp:584
cl::BufferRenderGL
Class interface for GL Render Buffer Memory Objects.
Definition: cl2.hpp:4235
cl::SVMTraitReadWrite
Definition: cl2.hpp:3466
cl::Device
Class interface for cl_device_id.
Definition: cl2.hpp:2074
cl::Device::Device
Device()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2110
cl::ImageFormat::operator=
ImageFormat & operator=(const ImageFormat &rhs)
Assignment operator.
Definition: cl2.hpp:2056
cl::KernelFunctor::result_type
Event result_type
Return type of the functor.
Definition: cl2.hpp:10002
cl::ImageGL
general image interface for GL interop. We abstract the 2D and 3D GL images into a single instance he...
Definition: cl2.hpp:5345
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7253
cl::Event::operator=
Event & operator=(const cl_event &rhs)
Assignment operator from cl_event - takes ownership.
Definition: cl2.hpp:3166
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(Image1DBuffer &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4576
cl::Platform::setDefault
static Platform setDefault(const Platform &default_platform)
Definition: cl2.hpp:2466
cl::Image3D::Image3D
Image3D(const Image3D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5218
cl::Image::Image
Image()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4333
cl::Program::Program
Program(const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6329
cl::Image3D
Class interface for 3D Image Memory objects.
Definition: cl2.hpp:5116
cl::Platform::Platform
Platform(const cl_platform_id &platform, bool retainObject=false)
Constructor from cl_platform_id.
Definition: cl2.hpp:2435
cl::Platform::get
static cl_int get(Platform *platform)
Gets the first available platform.
Definition: cl2.hpp:2665
cl::Kernel::operator=
Kernel & operator=(const cl_kernel &rhs)
Assignment operator from cl_kernel - takes ownership.
Definition: cl2.hpp:5858
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(BufferRenderGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4301
cl::Image::Image
Image(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4342
cl::Platform::getDevices
cl_int getDevices(cl_device_type type, vector< Device > *devices) const
Gets a list of devices for this platform.
Definition: cl2.hpp:2500
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< T * > &svmRawPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8376
cl::Pipe::Pipe
Pipe(const Pipe &pipe)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5509
cl::KernelFunctor
Definition: cl2.hpp:9967
cl::unmapSVM
cl_int unmapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9496
cl::Context::Context
Context(const cl_context &context, bool retainObject=false)
Constructor from cl_context - takes ownership.
Definition: cl2.hpp:3016
cl::Image2D
Class interface for 2D Image Memory objects.
Definition: cl2.hpp:4687
cl::Image1D
Class interface for 1D Image Memory objects.
Definition: cl2.hpp:4417
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_int *err=nullptr)
Definition: cl2.hpp:8925
cl::Image3DGL::Image3DGL
Image3DGL(Image3DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5324
cl::Platform::getInfo
cl_int getInfo(cl_platform_info name, T *param) const
Wrapper for clGetPlatformInfo().
Definition: cl2.hpp:2475
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(cl::pointer< T, D > &ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8195
cl::detail::GetInfoFunctor1
Definition: cl2.hpp:1551
cl::Image2DGL::Image2DGL
Image2DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4964
cl::Device::setDefault
static Device setDefault(const Device &default_device)
Definition: cl2.hpp:2141
cl::Memory::Memory
Memory(Memory &&mem) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3375
cl::detail::Deleter
Definition: cl2.hpp:3720
cl::detail::ReferenceHandler
Definition: cl2.hpp:1577
cl::CommandQueue::CommandQueue
CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7476
cl::Context::Context
Context(const Context &ctx)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2954
cl::CommandQueue::setDefault
static CommandQueue setDefault(const CommandQueue &default_queue)
Definition: cl2.hpp:7434
cl::KernelFunctor::operator()
Event operator()(const EnqueueArgs &args, Ts... ts)
Definition: cl2.hpp:10009
cl::SVMTraitWriteOnly
Definition: cl2.hpp:3488
cl::Image1DArray::Image1DArray
Image1DArray(Image1DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4665
cl::Buffer::Buffer
Buffer(IteratorType startIterator, IteratorType endIterator, bool readOnly, bool useHostPtr=false, cl_int *err=NULL)
Construct a Buffer from a host container via iterators. IteratorType must be random access....
Definition: cl2.hpp:3877
cl::BufferGL::BufferGL
BufferGL(BufferGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4204
cl::Device::getDefault
static Device getDefault(cl_int *errResult=NULL)
Returns the first device on the default context.
Definition: cl2.hpp:2123
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(cl::vector< T, Alloc > &container, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8220
cl::Sampler::getInfo
detail::param_traits< detail::cl_sampler_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetSamplerInfo() that returns by value.
Definition: cl2.hpp:5679
cl::Image2DArray::Image2DArray
Image2DArray(Image2DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5096
cl::Event::Event
Event()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3148
cl::Kernel::setSVMPointers
cl_int setSVMPointers(const std::array< void *, ArrayLength > &pointerList)
Definition: cl2.hpp:6068
cl::Image1DArray::Image1DArray
Image1DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4638
cl::Platform
Class interface for cl_platform_id.
Definition: cl2.hpp:2354
cl::Image3D::Image3D
Image3D(const cl_mem &image3D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5202
cl::detail::SVMTraitNull
Definition: cl2.hpp:3455
cl::CommandQueue::enqueueMigrateMemObjects
cl_int enqueueMigrateMemObjects(const vector< Memory > &memObjects, cl_mem_migration_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8306
cl::Image3DGL
Class interface for GL 3D Image Memory objects.
Definition: cl2.hpp:5254
cl::enqueueMapSVM
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9198
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_uint queueSize, cl_int *err=nullptr)
Definition: cl2.hpp:8953
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(cl::vector< T, Alloc > &container, cl_bool blocking, cl_map_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8120
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(DeviceQueueProperties properties, cl_int *err=NULL)
Definition: cl2.hpp:8753
cl::Image3DGL::Image3DGL
Image3DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5285
cl::Context::getInfo
cl_int getInfo(cl_context_info name, T *param) const
Wrapper for clGetContextInfo().
Definition: cl2.hpp:3032
cl::coarse_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitCoarse<> >> coarse_svm_vector
Vector alias to simplify contruction of coarse-grained SVM containers.
Definition: cl2.hpp:3794
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< T * > &svmRawPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8347
cl::ImageGL::ImageGL
ImageGL(const ImageGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5391
cl::Event
Class interface for cl_event.
Definition: cl2.hpp:3145
cl::EnqueueArgs
Definition: cl2.hpp:9776
cl::Kernel::Kernel
Kernel(const cl_kernel &kernel, bool retainObject=false)
Constructor from cl_kernel - takes ownership.
Definition: cl2.hpp:5850
cl::Local
LocalSpaceArg Local(size_type size)
Helper function for generating LocalSpaceArg objects.
Definition: cl2.hpp:5820
cl::Kernel::Kernel
Kernel(Kernel &&kernel) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5881
cl::Pipe::Pipe
Pipe(const cl_mem &pipe, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5493
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7366
cl::Image::getImageInfo
detail::param_traits< detail::cl_image_info, name >::param_type getImageInfo(cl_int *err=NULL) const
Wrapper for clGetImageInfo() that returns by value.
Definition: cl2.hpp:4397
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8065
cl::Program::getBuildInfo
vector< std::pair< cl::Device, typename detail::param_traits< detail::cl_program_build_info, name >::param_type > > getBuildInfo(cl_int *err=NULL) const
Definition: cl2.hpp:6704
cl::SVMAllocator::allocate
pointer allocate(size_type size, typename cl::SVMAllocator< void, SVMTrait >::const_pointer=0)
Definition: cl2.hpp:3617
cl::detail::param_traits
Definition: cl2.hpp:1396
cl::UserEvent::UserEvent
UserEvent()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3291
cl::Memory::getInfo
cl_int getInfo(cl_mem_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:3389
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(DeviceCommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8856
cl::BufferGL::BufferGL
BufferGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4174
cl::Program::setSpecializationConstant
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setSpecializationConstant(cl_uint index, const T &value)
Sets a SPIR-V specialization constant.
Definition: cl2.hpp:6800
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue()
Definition: cl2.hpp:8748
cl::NDRange::NDRange
NDRange(size_type size0, size_type size1, size_type size2)
Constructs three-dimensional range.
Definition: cl2.hpp:5732
cl::detail::Wrapper
Definition: cl2.hpp:1754
cl::BufferGL
Class interface for GL Buffer Memory Objects.
Definition: cl2.hpp:4138
cl::Event::getInfo
cl_int getInfo(cl_event_info name, T *param) const
Wrapper for clGetEventInfo().
Definition: cl2.hpp:3174
cl::detail::GetInfoFunctor0
Definition: cl2.hpp:1542
cl::Buffer::createSubBuffer
Buffer createSubBuffer(cl_mem_flags flags, cl_buffer_create_type buffer_create_type, const void *buffer_create_info, cl_int *err=NULL)
Creates a new buffer object from this.
Definition: cl2.hpp:3995
cl::Memory::getInfo
detail::param_traits< detail::cl_mem_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:3399
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_float4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7843
cl::CommandQueue::enqueueBarrier
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8702
cl::Image2D::Image2D
Image2D(const cl_mem &image2D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4877
cl::allocate_pointer
cl::pointer< T, detail::Deleter< Alloc > > allocate_pointer(const Alloc &alloc_, Args &&... args)
Definition: cl2.hpp:3747
cl::Device::Device
Device(const cl_device_id &device, bool retainObject=false)
Constructor from cl_device_id.
Definition: cl2.hpp:2116
cl::Image2DArray
Image interface for arrays of 2D images.
Definition: cl2.hpp:5022
cl::Image1DArray
Image interface for arrays of 1D images.
Definition: cl2.hpp:4593
cl::Kernel::Kernel
Kernel(const Kernel &kernel)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5867
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4271
cl::DeviceCommandQueue::updateDefault
static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err=nullptr)
Definition: cl2.hpp:8985
cl::NDRange
Class interface for specifying NDRange values.
Definition: cl2.hpp:5698
cl::UserEvent::UserEvent
UserEvent(const Context &context, cl_int *err=NULL)
Constructs a user event on a given context.
Definition: cl2.hpp:3275
cl::Device::createSubDevices
cl_int createSubDevices(const cl_device_partition_property *properties, vector< Device > *devices)
Wrapper for clCreateSubDevices().
Definition: cl2.hpp:2261
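A sketch of partitioning a device into two-compute-unit sub-devices with the wrapper above; the property list is zero-terminated exactly as for the underlying clCreateSubDevices() call, and devices without partition support simply return an error:

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>

int main() {
    cl::Device device = cl::Device::getDefault();

    // Split the device into sub-devices of two compute units each.
    cl_device_partition_property properties[] = {
        CL_DEVICE_PARTITION_EQUALLY, 2, 0
    };
    cl::vector<cl::Device> subDevices;
    cl_int err = device.createSubDevices(properties, &subDevices);
    return (err == CL_SUCCESS) ? 0 : 1;
}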
cl::Event::getInfo
detail::param_traits< detail::cl_event_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetEventInfo() that returns by value.
Definition: cl2.hpp:3184
cl::ImageGL::ImageGL
ImageGL(ImageGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5405
cl::detail::ReferenceHandler< cl_device_id >::release
static cl_int release(cl_device_id device)
Definition: cl2.hpp:1606
cl::CommandQueue
CommandQueue interface for cl_command_queue.
Definition: cl2.hpp:6992
cl::Image::operator=
Image & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4349
cl::compatibility::make_kernel
Definition: cl2.hpp:10078
cl::UserEvent::setStatus
cl_int setStatus(cl_int status)
Sets the execution status of a user event object.
Definition: cl2.hpp:3297
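A sketch of the usual user-event pattern: a queued command waits on the user event, and setStatus(CL_COMPLETE) releases it once the host is ready; all of the buffer setup is illustrative:

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>
#include <vector>

int main() {
    cl::Context context = cl::Context::getDefault();
    cl::CommandQueue queue = cl::CommandQueue::getDefault();
    cl::Buffer buffer(context, CL_MEM_READ_WRITE, sizeof(int));

    // The write below will not start until the user event completes.
    cl::UserEvent gate(context);
    int value = 7;
    std::vector<cl::Event> waitList{gate};
    queue.enqueueWriteBuffer(buffer, CL_FALSE, 0, sizeof(int), &value, &waitList);

    // ... host-side preparation could happen here ...

    gate.setStatus(CL_COMPLETE);   // releases the gated write
    queue.finish();
    return 0;
}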
cl::copy
cl_int copy(const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator)
Definition: cl2.hpp:9456
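The overload listed above copies device data into a host iterator range; its mirror image (iterators first, buffer last) copies host data onto the device. A brief sketch of the round trip, with sizes chosen arbitrarily:

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>
#include <vector>

int main() {
    cl::Context context = cl::Context::getDefault();
    cl::CommandQueue queue = cl::CommandQueue::getDefault();

    std::vector<float> host(256, 1.5f);
    cl::Buffer buffer(context, CL_MEM_READ_WRITE, host.size() * sizeof(float));

    cl::copy(queue, host.begin(), host.end(), buffer);       // host -> device
    std::vector<float> result(256);
    cl::copy(queue, buffer, result.begin(), result.end());   // device -> host
    return 0;
}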
cl::Buffer::operator=
Buffer & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:3956
cl::BufferGL::operator=
BufferGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4181
cl::BufferGL::BufferGL
BufferGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4165
cl::CommandQueue::enqueueMarkerWithWaitList
cl_int enqueueMarkerWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8253
cl::CommandQueue::CommandQueue
CommandQueue(QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES error...
Definition: cl2.hpp:7123
cl::Image1D::operator=
Image1D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4469
cl::Image1D::Image1D
Image1D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4453
cl::BufferRenderGL::operator=
BufferRenderGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4278
cl::CommandQueue::CommandQueue
CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES error...
Definition: cl2.hpp:7057
cl::Context::Context
Context()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3009
cl::Device::getInfo
cl_int getInfo(cl_device_info name, T *param) const
Wrapper for clGetDeviceInfo().
Definition: cl2.hpp:2188
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const Context &context, const Device &device, cl_uint queueSize, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8800
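A sketch of creating an on-device queue with the constructor above; device-side enqueue needs an OpenCL 2.x device, and the 16 KiB size is an arbitrary example (a real application might query CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE instead):

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>

int main() {
    cl::Context context = cl::Context::getDefault();
    cl::Device device = cl::Device::getDefault();

    cl_int err = CL_SUCCESS;
    cl::DeviceCommandQueue deviceQueue(context, device, 16 * 1024,
                                       cl::DeviceQueueProperties::None, &err);
    return (err == CL_SUCCESS) ? 0 : 1;
}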
cl::Image1D::Image1D
Image1D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 1D Image in a specified context.
Definition: cl2.hpp:4423
cl::Image2D::Image2D
Image2D(const Context &context, cl_channel_order order, const Image &sourceImage, cl_int *err=nullptr)
Constructs a 2D Image from an image.
Definition: cl2.hpp:4814
cl::Image3D::Image3D
Image3D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type depth, size_type row_pitch=0, size_type slice_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 3D Image in a specified context.
Definition: cl2.hpp:5122
cl::Program::Program
Program(const Program &program)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6566
cl::Image2DGL
Class interface for GL 2D Image Memory objects.
Definition: cl2.hpp:4932
cl::Image2D::Image2D
Image2D(const Image2D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4893
cl::Device::getHostTimer
cl_ulong getHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2217
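A short sketch of the host timer query, which needs an OpenCL 2.1 runtime (hence the 210 target); the returned value is a raw timestamp whose resolution is reported by CL_PLATFORM_HOST_TIMER_RESOLUTION:

#define CL_HPP_TARGET_OPENCL_VERSION 210
#include <CL/cl2.hpp>
#include <cstdio>

int main() {
    cl::Device device = cl::Device::getDefault();

    cl_int err = CL_SUCCESS;
    cl_ulong hostTime = device.getHostTimer(&err);
    std::printf("host timer: %llu\n",
                static_cast<unsigned long long>(hostTime));
    return (err == CL_SUCCESS) ? 0 : 1;
}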
cl::detail::KernelArgumentHandler
Definition: cl2.hpp:5784
cl::Program::Program
Program(const Context &context, const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6292
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(const Image1DBuffer &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4562
cl::SVMAllocator::max_size
size_type max_size() const CL_HPP_NOEXCEPT_
Definition: cl2.hpp:3659
cl::Context::setDefault
static Context setDefault(const Context &default_context)
Definition: cl2.hpp:3001